Cleaned up the directories
This commit is contained in:
parent f708506d68
commit a683fcffea
1340 changed files with 554582 additions and 6840 deletions
@@ -0,0 +1,62 @@
# engine/__init__.py
# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php

"""SQL connections, SQL execution and high-level DB-API interface.

The engine package defines the basic components used to interface
DB-API modules with higher-level statement construction,
connection-management, execution and result contexts.  The primary
"entry point" class into this package is the Engine and its public
constructor ``create_engine()``.

"""

from . import events as events
from . import util as util
from .base import Connection as Connection
from .base import Engine as Engine
from .base import NestedTransaction as NestedTransaction
from .base import RootTransaction as RootTransaction
from .base import Transaction as Transaction
from .base import TwoPhaseTransaction as TwoPhaseTransaction
from .create import create_engine as create_engine
from .create import create_pool_from_url as create_pool_from_url
from .create import engine_from_config as engine_from_config
from .cursor import CursorResult as CursorResult
from .cursor import ResultProxy as ResultProxy
from .interfaces import AdaptedConnection as AdaptedConnection
from .interfaces import BindTyping as BindTyping
from .interfaces import Compiled as Compiled
from .interfaces import Connectable as Connectable
from .interfaces import ConnectArgsType as ConnectArgsType
from .interfaces import ConnectionEventsTarget as ConnectionEventsTarget
from .interfaces import CreateEnginePlugin as CreateEnginePlugin
from .interfaces import Dialect as Dialect
from .interfaces import ExceptionContext as ExceptionContext
from .interfaces import ExecutionContext as ExecutionContext
from .interfaces import TypeCompiler as TypeCompiler
from .mock import create_mock_engine as create_mock_engine
from .reflection import Inspector as Inspector
from .reflection import ObjectKind as ObjectKind
from .reflection import ObjectScope as ObjectScope
from .result import ChunkedIteratorResult as ChunkedIteratorResult
from .result import FilterResult as FilterResult
from .result import FrozenResult as FrozenResult
from .result import IteratorResult as IteratorResult
from .result import MappingResult as MappingResult
from .result import MergedResult as MergedResult
from .result import Result as Result
from .result import result_tuple as result_tuple
from .result import ScalarResult as ScalarResult
from .result import TupleResult as TupleResult
from .row import BaseRow as BaseRow
from .row import Row as Row
from .row import RowMapping as RowMapping
from .url import make_url as make_url
from .url import URL as URL
from .util import connection_memoize as connection_memoize
from ..sql import ddl as ddl
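A minimal usage sketch of the public API re-exported above (illustrative only, not part of this commit; the in-memory SQLite URL is an assumption):

    from sqlalchemy import create_engine, text

    engine = create_engine("sqlite:///:memory:")
    with engine.connect() as conn:
        result = conn.execute(text("SELECT 1 AS answer"))  # a CursorResult
        row = result.one()                                 # a Row
        print(row.answer, dict(row._mapping))              # attribute and RowMapping access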
@@ -0,0 +1,136 @@
# engine/_py_processors.py
# Copyright (C) 2010-2024 the SQLAlchemy authors and contributors
# <see AUTHORS file>
# Copyright (C) 2010 Gaetan de Menten gdementen@gmail.com
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php

"""defines generic type conversion functions, as used in bind and result
processors.

They all share one common characteristic: None is passed through unchanged.

"""

from __future__ import annotations

import datetime
from datetime import date as date_cls
from datetime import datetime as datetime_cls
from datetime import time as time_cls
from decimal import Decimal
import typing
from typing import Any
from typing import Callable
from typing import Optional
from typing import Type
from typing import TypeVar
from typing import Union


_DT = TypeVar(
    "_DT", bound=Union[datetime.datetime, datetime.time, datetime.date]
)


def str_to_datetime_processor_factory(
    regexp: typing.Pattern[str], type_: Callable[..., _DT]
) -> Callable[[Optional[str]], Optional[_DT]]:
    rmatch = regexp.match
    # Even on python2.6 datetime.strptime is both slower than this code
    # and it does not support microseconds.
    has_named_groups = bool(regexp.groupindex)

    def process(value: Optional[str]) -> Optional[_DT]:
        if value is None:
            return None
        else:
            try:
                m = rmatch(value)
            except TypeError as err:
                raise ValueError(
                    "Couldn't parse %s string '%r' "
                    "- value is not a string." % (type_.__name__, value)
                ) from err

            if m is None:
                raise ValueError(
                    "Couldn't parse %s string: "
                    "'%s'" % (type_.__name__, value)
                )
            if has_named_groups:
                groups = m.groupdict(0)
                return type_(
                    **dict(
                        list(
                            zip(
                                iter(groups.keys()),
                                list(map(int, iter(groups.values()))),
                            )
                        )
                    )
                )
            else:
                return type_(*list(map(int, m.groups(0))))

    return process


def to_decimal_processor_factory(
    target_class: Type[Decimal], scale: int
) -> Callable[[Optional[float]], Optional[Decimal]]:
    fstring = "%%.%df" % scale

    def process(value: Optional[float]) -> Optional[Decimal]:
        if value is None:
            return None
        else:
            return target_class(fstring % value)

    return process


def to_float(value: Optional[Union[int, float]]) -> Optional[float]:
    if value is None:
        return None
    else:
        return float(value)


def to_str(value: Optional[Any]) -> Optional[str]:
    if value is None:
        return None
    else:
        return str(value)


def int_to_boolean(value: Optional[int]) -> Optional[bool]:
    if value is None:
        return None
    else:
        return bool(value)


def str_to_datetime(value: Optional[str]) -> Optional[datetime.datetime]:
    if value is not None:
        dt_value = datetime_cls.fromisoformat(value)
    else:
        dt_value = None
    return dt_value


def str_to_time(value: Optional[str]) -> Optional[datetime.time]:
    if value is not None:
        dt_value = time_cls.fromisoformat(value)
    else:
        dt_value = None
    return dt_value


def str_to_date(value: Optional[str]) -> Optional[datetime.date]:
    if value is not None:
        dt_value = date_cls.fromisoformat(value)
    else:
        dt_value = None
    return dt_value
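A brief sketch of how the conversion factories above behave (illustrative only; it imports straight from the pure-Python module shown in this diff, and None always passes through unchanged):

    from decimal import Decimal
    from sqlalchemy.engine._py_processors import str_to_date, to_decimal_processor_factory

    to_dec = to_decimal_processor_factory(Decimal, 2)
    print(to_dec(3.14159))            # Decimal('3.14') - formatted with "%.2f" first
    print(to_dec(None))               # None
    print(str_to_date("2024-05-01"))  # datetime.date(2024, 5, 1)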
@@ -0,0 +1,128 @@
# engine/_py_row.py
# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
from __future__ import annotations

import operator
import typing
from typing import Any
from typing import Callable
from typing import Dict
from typing import Iterator
from typing import List
from typing import Mapping
from typing import Optional
from typing import Tuple
from typing import Type

if typing.TYPE_CHECKING:
    from .result import _KeyType
    from .result import _ProcessorsType
    from .result import _RawRowType
    from .result import _TupleGetterType
    from .result import ResultMetaData

MD_INDEX = 0  # integer index in cursor.description


class BaseRow:
    __slots__ = ("_parent", "_data", "_key_to_index")

    _parent: ResultMetaData
    _key_to_index: Mapping[_KeyType, int]
    _data: _RawRowType

    def __init__(
        self,
        parent: ResultMetaData,
        processors: Optional[_ProcessorsType],
        key_to_index: Mapping[_KeyType, int],
        data: _RawRowType,
    ):
        """Row objects are constructed by CursorResult objects."""
        object.__setattr__(self, "_parent", parent)

        object.__setattr__(self, "_key_to_index", key_to_index)

        if processors:
            object.__setattr__(
                self,
                "_data",
                tuple(
                    [
                        proc(value) if proc else value
                        for proc, value in zip(processors, data)
                    ]
                ),
            )
        else:
            object.__setattr__(self, "_data", tuple(data))

    def __reduce__(self) -> Tuple[Callable[..., BaseRow], Tuple[Any, ...]]:
        return (
            rowproxy_reconstructor,
            (self.__class__, self.__getstate__()),
        )

    def __getstate__(self) -> Dict[str, Any]:
        return {"_parent": self._parent, "_data": self._data}

    def __setstate__(self, state: Dict[str, Any]) -> None:
        parent = state["_parent"]
        object.__setattr__(self, "_parent", parent)
        object.__setattr__(self, "_data", state["_data"])
        object.__setattr__(self, "_key_to_index", parent._key_to_index)

    def _values_impl(self) -> List[Any]:
        return list(self)

    def __iter__(self) -> Iterator[Any]:
        return iter(self._data)

    def __len__(self) -> int:
        return len(self._data)

    def __hash__(self) -> int:
        return hash(self._data)

    def __getitem__(self, key: Any) -> Any:
        return self._data[key]

    def _get_by_key_impl_mapping(self, key: str) -> Any:
        try:
            return self._data[self._key_to_index[key]]
        except KeyError:
            pass
        self._parent._key_not_found(key, False)

    def __getattr__(self, name: str) -> Any:
        try:
            return self._data[self._key_to_index[name]]
        except KeyError:
            pass
        self._parent._key_not_found(name, True)

    def _to_tuple_instance(self) -> Tuple[Any, ...]:
        return self._data


# This reconstructor is necessary so that pickles with the Cy extension or
# without use the same Binary format.
def rowproxy_reconstructor(
    cls: Type[BaseRow], state: Dict[str, Any]
) -> BaseRow:
    obj = cls.__new__(cls)
    obj.__setstate__(state)
    return obj


def tuplegetter(*indexes: int) -> _TupleGetterType:
    if len(indexes) != 1:
        for i in range(1, len(indexes)):
            if indexes[i - 1] != indexes[i] - 1:
                return operator.itemgetter(*indexes)
    # slice form is faster but returns a list if input is list
    return operator.itemgetter(slice(indexes[0], indexes[-1] + 1))
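A short sketch of the ``tuplegetter()`` fast path above (illustrative only): contiguous indexes take the slice form, non-contiguous indexes fall back to a plain ``operator.itemgetter``:

    from sqlalchemy.engine._py_row import tuplegetter

    row = ("a", "b", "c", "d")
    print(tuplegetter(1, 2, 3)(row))  # ('b', 'c', 'd') via itemgetter(slice(1, 4))
    print(tuplegetter(0, 2)(row))     # ('a', 'c') via itemgetter(0, 2)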
@@ -0,0 +1,74 @@
# engine/_py_util.py
# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
from __future__ import annotations

import typing
from typing import Any
from typing import Mapping
from typing import Optional
from typing import Tuple

from .. import exc

if typing.TYPE_CHECKING:
    from .interfaces import _CoreAnyExecuteParams
    from .interfaces import _CoreMultiExecuteParams
    from .interfaces import _DBAPIAnyExecuteParams
    from .interfaces import _DBAPIMultiExecuteParams


_no_tuple: Tuple[Any, ...] = ()


def _distill_params_20(
    params: Optional[_CoreAnyExecuteParams],
) -> _CoreMultiExecuteParams:
    if params is None:
        return _no_tuple
    # Assume list is more likely than tuple
    elif isinstance(params, list) or isinstance(params, tuple):
        # collections_abc.MutableSequence): # avoid abc.__instancecheck__
        if params and not isinstance(params[0], (tuple, Mapping)):
            raise exc.ArgumentError(
                "List argument must consist only of tuples or dictionaries"
            )

        return params
    elif isinstance(params, dict) or isinstance(
        # only do immutabledict or abc.__instancecheck__ for Mapping after
        # we've checked for plain dictionaries and would otherwise raise
        params,
        Mapping,
    ):
        return [params]
    else:
        raise exc.ArgumentError("mapping or list expected for parameters")


def _distill_raw_params(
    params: Optional[_DBAPIAnyExecuteParams],
) -> _DBAPIMultiExecuteParams:
    if params is None:
        return _no_tuple
    elif isinstance(params, list):
        # collections_abc.MutableSequence): # avoid abc.__instancecheck__
        if params and not isinstance(params[0], (tuple, Mapping)):
            raise exc.ArgumentError(
                "List argument must consist only of tuples or dictionaries"
            )

        return params
    elif isinstance(params, (tuple, dict)) or isinstance(
        # only do abc.__instancecheck__ for Mapping after we've checked
        # for plain dictionaries and would otherwise raise
        params,
        Mapping,
    ):
        # cast("Union[List[Mapping[str, Any]], Tuple[Any, ...]]", [params])
        return [params]  # type: ignore
    else:
        raise exc.ArgumentError("mapping or sequence expected for parameters")
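A sketch of the parameter "distilling" performed by the private helpers above (illustrative only; these are internal functions, not public API): the goal is to normalize every accepted shape into a list of mappings or tuples for executemany-style handling.

    from sqlalchemy.engine._py_util import _distill_params_20

    print(_distill_params_20(None))                  # ()
    print(_distill_params_20({"x": 5}))              # [{'x': 5}] - single mapping wrapped in a list
    print(_distill_params_20([{"x": 5}, {"x": 6}]))  # list of mappings returned as-is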
3355  elitebot/lib/python3.11/site-packages/sqlalchemy/engine/base.py  (Normal file)
File diff suppressed because it is too large
@@ -0,0 +1,81 @@
# engine/characteristics.py
# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
from __future__ import annotations

import abc
import typing
from typing import Any
from typing import ClassVar

if typing.TYPE_CHECKING:
    from .interfaces import DBAPIConnection
    from .interfaces import Dialect


class ConnectionCharacteristic(abc.ABC):
    """An abstract base for an object that can set, get and reset a
    per-connection characteristic, typically one that gets reset when the
    connection is returned to the connection pool.

    transaction isolation is the canonical example, and the
    ``IsolationLevelCharacteristic`` implementation provides this for the
    ``DefaultDialect``.

    The ``ConnectionCharacteristic`` class should call upon the ``Dialect`` for
    the implementation of each method.  The object exists strictly to serve as
    a dialect visitor that can be placed into the
    ``DefaultDialect.connection_characteristics`` dictionary where it will take
    effect for calls to :meth:`_engine.Connection.execution_options` and
    related APIs.

    .. versionadded:: 1.4

    """

    __slots__ = ()

    transactional: ClassVar[bool] = False

    @abc.abstractmethod
    def reset_characteristic(
        self, dialect: Dialect, dbapi_conn: DBAPIConnection
    ) -> None:
        """Reset the characteristic on the connection to its default value."""

    @abc.abstractmethod
    def set_characteristic(
        self, dialect: Dialect, dbapi_conn: DBAPIConnection, value: Any
    ) -> None:
        """set characteristic on the connection to a given value."""

    @abc.abstractmethod
    def get_characteristic(
        self, dialect: Dialect, dbapi_conn: DBAPIConnection
    ) -> Any:
        """Given a DBAPI connection, get the current value of the
        characteristic.

        """


class IsolationLevelCharacteristic(ConnectionCharacteristic):
    transactional: ClassVar[bool] = True

    def reset_characteristic(
        self, dialect: Dialect, dbapi_conn: DBAPIConnection
    ) -> None:
        dialect.reset_isolation_level(dbapi_conn)

    def set_characteristic(
        self, dialect: Dialect, dbapi_conn: DBAPIConnection, value: Any
    ) -> None:
        dialect._assert_and_set_isolation_level(dbapi_conn, value)

    def get_characteristic(
        self, dialect: Dialect, dbapi_conn: DBAPIConnection
    ) -> Any:
        return dialect.get_isolation_level(dbapi_conn)
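A usage sketch of the behavior that ``IsolationLevelCharacteristic`` services (illustrative only; the PostgreSQL URL is an assumption): setting ``isolation_level`` through ``execution_options()`` applies the characteristic on each checked-out connection and resets it when the connection returns to the pool:

    from sqlalchemy import create_engine, text

    engine = create_engine("postgresql+psycopg2://scott:tiger@localhost/test")

    # a second Engine sharing the same pool and dialect, with a different
    # isolation level applied per checked-out connection
    autocommit_engine = engine.execution_options(isolation_level="AUTOCOMMIT")

    with autocommit_engine.connect() as conn:
        conn.execute(text("VACUUM"))  # runs outside an explicit transaction block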
@@ -0,0 +1,864 @@
# engine/create.py
# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php

from __future__ import annotations

import inspect
import typing
from typing import Any
from typing import Callable
from typing import cast
from typing import Dict
from typing import List
from typing import Optional
from typing import overload
from typing import Type
from typing import Union

from . import base
from . import url as _url
from .interfaces import DBAPIConnection
from .mock import create_mock_engine
from .. import event
from .. import exc
from .. import util
from ..pool import _AdhocProxiedConnection
from ..pool import ConnectionPoolEntry
from ..sql import compiler
from ..util import immutabledict

if typing.TYPE_CHECKING:
    from .base import Engine
    from .interfaces import _ExecuteOptions
    from .interfaces import _ParamStyle
    from .interfaces import IsolationLevel
    from .url import URL
    from ..log import _EchoFlagType
    from ..pool import _CreatorFnType
    from ..pool import _CreatorWRecFnType
    from ..pool import _ResetStyleArgType
    from ..pool import Pool
    from ..util.typing import Literal


@overload
def create_engine(
    url: Union[str, URL],
    *,
    connect_args: Dict[Any, Any] = ...,
    convert_unicode: bool = ...,
    creator: Union[_CreatorFnType, _CreatorWRecFnType] = ...,
    echo: _EchoFlagType = ...,
    echo_pool: _EchoFlagType = ...,
    enable_from_linting: bool = ...,
    execution_options: _ExecuteOptions = ...,
    future: Literal[True],
    hide_parameters: bool = ...,
    implicit_returning: Literal[True] = ...,
    insertmanyvalues_page_size: int = ...,
    isolation_level: IsolationLevel = ...,
    json_deserializer: Callable[..., Any] = ...,
    json_serializer: Callable[..., Any] = ...,
    label_length: Optional[int] = ...,
    logging_name: str = ...,
    max_identifier_length: Optional[int] = ...,
    max_overflow: int = ...,
    module: Optional[Any] = ...,
    paramstyle: Optional[_ParamStyle] = ...,
    pool: Optional[Pool] = ...,
    poolclass: Optional[Type[Pool]] = ...,
    pool_logging_name: str = ...,
    pool_pre_ping: bool = ...,
    pool_size: int = ...,
    pool_recycle: int = ...,
    pool_reset_on_return: Optional[_ResetStyleArgType] = ...,
    pool_timeout: float = ...,
    pool_use_lifo: bool = ...,
    plugins: List[str] = ...,
    query_cache_size: int = ...,
    use_insertmanyvalues: bool = ...,
    **kwargs: Any,
) -> Engine: ...


@overload
def create_engine(url: Union[str, URL], **kwargs: Any) -> Engine: ...


@util.deprecated_params(
    strategy=(
        "1.4",
        "The :paramref:`_sa.create_engine.strategy` keyword is deprecated, "
        "and the only argument accepted is 'mock'; please use "
        ":func:`.create_mock_engine` going forward.  For general "
        "customization of create_engine which may have been accomplished "
        "using strategies, see :class:`.CreateEnginePlugin`.",
    ),
    empty_in_strategy=(
        "1.4",
        "The :paramref:`_sa.create_engine.empty_in_strategy` keyword is "
        "deprecated, and no longer has any effect.  All IN expressions "
        "are now rendered using "
        'the "expanding parameter" strategy which renders a set of bound'
        'expressions, or an "empty set" SELECT, at statement execution'
        "time.",
    ),
    implicit_returning=(
        "2.0",
        "The :paramref:`_sa.create_engine.implicit_returning` parameter "
        "is deprecated and will be removed in a future release. ",
    ),
)
def create_engine(url: Union[str, _url.URL], **kwargs: Any) -> Engine:
    """Create a new :class:`_engine.Engine` instance.

    The standard calling form is to send the :ref:`URL <database_urls>` as the
    first positional argument, usually a string
    that indicates database dialect and connection arguments::

        engine = create_engine("postgresql+psycopg2://scott:tiger@localhost/test")

    .. note::

        Please review :ref:`database_urls` for general guidelines in composing
        URL strings.  In particular, special characters, such as those often
        part of passwords, must be URL encoded to be properly parsed.

    Additional keyword arguments may then follow it which
    establish various options on the resulting :class:`_engine.Engine`
    and its underlying :class:`.Dialect` and :class:`_pool.Pool`
    constructs::

        engine = create_engine("mysql+mysqldb://scott:tiger@hostname/dbname",
                               pool_recycle=3600, echo=True)

    The string form of the URL is
    ``dialect[+driver]://user:password@host/dbname[?key=value..]``, where
    ``dialect`` is a database name such as ``mysql``, ``oracle``,
    ``postgresql``, etc., and ``driver`` the name of a DBAPI, such as
    ``psycopg2``, ``pyodbc``, ``cx_oracle``, etc.  Alternatively,
    the URL can be an instance of :class:`~sqlalchemy.engine.url.URL`.

    ``**kwargs`` takes a wide variety of options which are routed
    towards their appropriate components.  Arguments may be specific to
    the :class:`_engine.Engine`, the underlying :class:`.Dialect`,
    as well as the
    :class:`_pool.Pool`.  Specific dialects also accept keyword arguments that
    are unique to that dialect.  Here, we describe the parameters
    that are common to most :func:`_sa.create_engine()` usage.

    Once established, the newly resulting :class:`_engine.Engine` will
    request a connection from the underlying :class:`_pool.Pool` once
    :meth:`_engine.Engine.connect` is called, or a method which depends on it
    such as :meth:`_engine.Engine.execute` is invoked.  The
    :class:`_pool.Pool` in turn
    will establish the first actual DBAPI connection when this request
    is received.  The :func:`_sa.create_engine` call itself does **not**
    establish any actual DBAPI connections directly.

    .. seealso::

        :doc:`/core/engines`

        :doc:`/dialects/index`

        :ref:`connections_toplevel`

    :param connect_args: a dictionary of options which will be
        passed directly to the DBAPI's ``connect()`` method as
        additional keyword arguments.  See the example
        at :ref:`custom_dbapi_args`.

    :param creator: a callable which returns a DBAPI connection.
        This creation function will be passed to the underlying
        connection pool and will be used to create all new database
        connections. Usage of this function causes connection
        parameters specified in the URL argument to be bypassed.

        This hook is not as flexible as the newer
        :meth:`_events.DialectEvents.do_connect` hook which allows complete
        control over how a connection is made to the database, given the full
        set of URL arguments and state beforehand.

        .. seealso::

            :meth:`_events.DialectEvents.do_connect` - event hook that allows
            full control over DBAPI connection mechanics.

            :ref:`custom_dbapi_args`

    :param echo=False: if True, the Engine will log all statements
        as well as a ``repr()`` of their parameter lists to the default log
        handler, which defaults to ``sys.stdout`` for output.  If set to the
        string ``"debug"``, result rows will be printed to the standard output
        as well.  The ``echo`` attribute of ``Engine`` can be modified at any
        time to turn logging on and off; direct control of logging is also
        available using the standard Python ``logging`` module.

        .. seealso::

            :ref:`dbengine_logging` - further detail on how to configure
            logging.

    :param echo_pool=False: if True, the connection pool will log
        informational output such as when connections are invalidated
        as well as when connections are recycled to the default log handler,
        which defaults to ``sys.stdout`` for output.  If set to the string
        ``"debug"``, the logging will include pool checkouts and checkins.
        Direct control of logging is also available using the standard Python
        ``logging`` module.

        .. seealso::

            :ref:`dbengine_logging` - further detail on how to configure
            logging.

    :param empty_in_strategy: No longer used; SQLAlchemy now uses
        "empty set" behavior for IN in all cases.

    :param enable_from_linting: defaults to True.  Will emit a warning
        if a given SELECT statement is found to have un-linked FROM elements
        which would cause a cartesian product.

        .. versionadded:: 1.4

        .. seealso::

            :ref:`change_4737`

    :param execution_options: Dictionary execution options which will
        be applied to all connections.  See
        :meth:`~sqlalchemy.engine.Connection.execution_options`

    :param future: Use the 2.0 style :class:`_engine.Engine` and
        :class:`_engine.Connection` API.

        As of SQLAlchemy 2.0, this parameter is present for backwards
        compatibility only and must remain at its default value of ``True``.

        The :paramref:`_sa.create_engine.future` parameter will be
        deprecated in a subsequent 2.x release and eventually removed.

        .. versionadded:: 1.4

        .. versionchanged:: 2.0 All :class:`_engine.Engine` objects are
           "future" style engines and there is no longer a ``future=False``
           mode of operation.

        .. seealso::

            :ref:`migration_20_toplevel`

    :param hide_parameters: Boolean, when set to True, SQL statement parameters
        will not be displayed in INFO logging nor will they be formatted into
        the string representation of :class:`.StatementError` objects.

        .. versionadded:: 1.3.8

        .. seealso::

            :ref:`dbengine_logging` - further detail on how to configure
            logging.

    :param implicit_returning=True: Legacy parameter that may only be set
        to True.  In SQLAlchemy 2.0, this parameter does nothing.  In order to
        disable "implicit returning" for statements invoked by the ORM,
        configure this on a per-table basis using the
        :paramref:`.Table.implicit_returning` parameter.

    :param insertmanyvalues_page_size: number of rows to format into an
        INSERT statement when the statement uses "insertmanyvalues" mode, which is
        a paged form of bulk insert that is used for many backends when using
        :term:`executemany` execution typically in conjunction with RETURNING.
        Defaults to 1000, but may also be subject to dialect-specific limiting
        factors which may override this value on a per-statement basis.

        .. versionadded:: 2.0

        .. seealso::

            :ref:`engine_insertmanyvalues`

            :ref:`engine_insertmanyvalues_page_size`

            :paramref:`_engine.Connection.execution_options.insertmanyvalues_page_size`

    :param isolation_level: optional string name of an isolation level
        which will be set on all new connections unconditionally.
        Isolation levels are typically some subset of the string names
        ``"SERIALIZABLE"``, ``"REPEATABLE READ"``,
        ``"READ COMMITTED"``, ``"READ UNCOMMITTED"`` and ``"AUTOCOMMIT"``
        based on backend.

        The :paramref:`_sa.create_engine.isolation_level` parameter is
        in contrast to the
        :paramref:`.Connection.execution_options.isolation_level`
        execution option, which may be set on an individual
        :class:`.Connection`, as well as the same parameter passed to
        :meth:`.Engine.execution_options`, where it may be used to create
        multiple engines with different isolation levels that share a common
        connection pool and dialect.

        .. versionchanged:: 2.0 The
           :paramref:`_sa.create_engine.isolation_level`
           parameter has been generalized to work on all dialects which support
           the concept of isolation level, and is provided as a more succinct,
           up front configuration switch in contrast to the execution option
           which is more of an ad-hoc programmatic option.

        .. seealso::

            :ref:`dbapi_autocommit`

    :param json_deserializer: for dialects that support the
        :class:`_types.JSON`
        datatype, this is a Python callable that will convert a JSON string
        to a Python object.  By default, the Python ``json.loads`` function is
        used.

        .. versionchanged:: 1.3.7  The SQLite dialect renamed this from
           ``_json_deserializer``.

    :param json_serializer: for dialects that support the :class:`_types.JSON`
        datatype, this is a Python callable that will render a given object
        as JSON.  By default, the Python ``json.dumps`` function is used.

        .. versionchanged:: 1.3.7  The SQLite dialect renamed this from
           ``_json_serializer``.

    :param label_length=None: optional integer value which limits
        the size of dynamically generated column labels to that many
        characters.  If less than 6, labels are generated as
        "_(counter)".  If ``None``, the value of
        ``dialect.max_identifier_length``, which may be affected via the
        :paramref:`_sa.create_engine.max_identifier_length` parameter,
        is used instead.  The value of
        :paramref:`_sa.create_engine.label_length`
        may not be larger than that of
        :paramref:`_sa.create_engine.max_identfier_length`.

        .. seealso::

            :paramref:`_sa.create_engine.max_identifier_length`

    :param logging_name: String identifier which will be used within
        the "name" field of logging records generated within the
        "sqlalchemy.engine" logger.  Defaults to a hexstring of the
        object's id.

        .. seealso::

            :ref:`dbengine_logging` - further detail on how to configure
            logging.

            :paramref:`_engine.Connection.execution_options.logging_token`

    :param max_identifier_length: integer; override the max_identifier_length
        determined by the dialect.  if ``None`` or zero, has no effect.  This
        is the database's configured maximum number of characters that may be
        used in a SQL identifier such as a table name, column name, or label
        name.  All dialects determine this value automatically, however in the
        case of a new database version for which this value has changed but
        SQLAlchemy's dialect has not been adjusted, the value may be passed
        here.

        .. versionadded:: 1.3.9

        .. seealso::

            :paramref:`_sa.create_engine.label_length`

    :param max_overflow=10: the number of connections to allow in
        connection pool "overflow", that is connections that can be
        opened above and beyond the pool_size setting, which defaults
        to five.  this is only used with :class:`~sqlalchemy.pool.QueuePool`.

    :param module=None: reference to a Python module object (the module
        itself, not its string name).  Specifies an alternate DBAPI module to
        be used by the engine's dialect.  Each sub-dialect references a
        specific DBAPI which will be imported before first connect.  This
        parameter causes the import to be bypassed, and the given module to
        be used instead.  Can be used for testing of DBAPIs as well as to
        inject "mock" DBAPI implementations into the :class:`_engine.Engine`.

    :param paramstyle=None: The `paramstyle <https://legacy.python.org/dev/peps/pep-0249/#paramstyle>`_
        to use when rendering bound parameters.  This style defaults to the
        one recommended by the DBAPI itself, which is retrieved from the
        ``.paramstyle`` attribute of the DBAPI.  However, most DBAPIs accept
        more than one paramstyle, and in particular it may be desirable
        to change a "named" paramstyle into a "positional" one, or vice versa.
        When this attribute is passed, it should be one of the values
        ``"qmark"``, ``"numeric"``, ``"named"``, ``"format"`` or
        ``"pyformat"``, and should correspond to a parameter style known
        to be supported by the DBAPI in use.

    :param pool=None: an already-constructed instance of
        :class:`~sqlalchemy.pool.Pool`, such as a
        :class:`~sqlalchemy.pool.QueuePool` instance.  If non-None, this
        pool will be used directly as the underlying connection pool
        for the engine, bypassing whatever connection parameters are
        present in the URL argument.  For information on constructing
        connection pools manually, see :ref:`pooling_toplevel`.

    :param poolclass=None: a :class:`~sqlalchemy.pool.Pool`
        subclass, which will be used to create a connection pool
        instance using the connection parameters given in the URL.  Note
        this differs from ``pool`` in that you don't actually
        instantiate the pool in this case, you just indicate what type
        of pool to be used.

    :param pool_logging_name: String identifier which will be used within
        the "name" field of logging records generated within the
        "sqlalchemy.pool" logger.  Defaults to a hexstring of the object's
        id.

        .. seealso::

            :ref:`dbengine_logging` - further detail on how to configure
            logging.

    :param pool_pre_ping: boolean, if True will enable the connection pool
        "pre-ping" feature that tests connections for liveness upon
        each checkout.

        .. versionadded:: 1.2

        .. seealso::

            :ref:`pool_disconnects_pessimistic`

    :param pool_size=5: the number of connections to keep open
        inside the connection pool.  This used with
        :class:`~sqlalchemy.pool.QueuePool` as
        well as :class:`~sqlalchemy.pool.SingletonThreadPool`.  With
        :class:`~sqlalchemy.pool.QueuePool`, a ``pool_size`` setting
        of 0 indicates no limit; to disable pooling, set ``poolclass`` to
        :class:`~sqlalchemy.pool.NullPool` instead.

    :param pool_recycle=-1: this setting causes the pool to recycle
        connections after the given number of seconds has passed.  It
        defaults to -1, or no timeout.  For example, setting to 3600
        means connections will be recycled after one hour.  Note that
        MySQL in particular will disconnect automatically if no
        activity is detected on a connection for eight hours (although
        this is configurable with the MySQLDB connection itself and the
        server configuration as well).

        .. seealso::

            :ref:`pool_setting_recycle`

    :param pool_reset_on_return='rollback': set the
        :paramref:`_pool.Pool.reset_on_return` parameter of the underlying
        :class:`_pool.Pool` object, which can be set to the values
        ``"rollback"``, ``"commit"``, or ``None``.

        .. seealso::

            :ref:`pool_reset_on_return`

    :param pool_timeout=30: number of seconds to wait before giving
        up on getting a connection from the pool.  This is only used
        with :class:`~sqlalchemy.pool.QueuePool`.  This can be a float but is
        subject to the limitations of Python time functions which may not be
        reliable in the tens of milliseconds.

        .. note: don't use 30.0 above, it seems to break with the :param tag

    :param pool_use_lifo=False: use LIFO (last-in-first-out) when retrieving
        connections from :class:`.QueuePool` instead of FIFO
        (first-in-first-out).  Using LIFO, a server-side timeout scheme can
        reduce the number of connections used during non-peak periods of
        use.  When planning for server-side timeouts, ensure that a recycle or
        pre-ping strategy is in use to gracefully handle stale connections.

        .. versionadded:: 1.3

        .. seealso::

            :ref:`pool_use_lifo`

            :ref:`pool_disconnects`

    :param plugins: string list of plugin names to load.  See
        :class:`.CreateEnginePlugin` for background.

        .. versionadded:: 1.2.3

    :param query_cache_size: size of the cache used to cache the SQL string
        form of queries.  Set to zero to disable caching.

        The cache is pruned of its least recently used items when its size reaches
        N * 1.5.  Defaults to 500, meaning the cache will always store at least
        500 SQL statements when filled, and will grow up to 750 items at which
        point it is pruned back down to 500 by removing the 250 least recently
        used items.

        Caching is accomplished on a per-statement basis by generating a
        cache key that represents the statement's structure, then generating
        string SQL for the current dialect only if that key is not present
        in the cache.  All statements support caching, however some features
        such as an INSERT with a large set of parameters will intentionally
        bypass the cache.  SQL logging will indicate statistics for each
        statement whether or not it were pull from the cache.

        .. note:: some ORM functions related to unit-of-work persistence as well
           as some attribute loading strategies will make use of individual
           per-mapper caches outside of the main cache.

        .. seealso::

            :ref:`sql_caching`

        .. versionadded:: 1.4

    :param use_insertmanyvalues: True by default, use the "insertmanyvalues"
        execution style for INSERT..RETURNING statements by default.

        .. versionadded:: 2.0

        .. seealso::

            :ref:`engine_insertmanyvalues`

    """  # noqa

    if "strategy" in kwargs:
        strat = kwargs.pop("strategy")
        if strat == "mock":
            # this case is deprecated
            return create_mock_engine(url, **kwargs)  # type: ignore
        else:
            raise exc.ArgumentError("unknown strategy: %r" % strat)

    kwargs.pop("empty_in_strategy", None)

    # create url.URL object
    u = _url.make_url(url)

    u, plugins, kwargs = u._instantiate_plugins(kwargs)

    entrypoint = u._get_entrypoint()
    _is_async = kwargs.pop("_is_async", False)
    if _is_async:
        dialect_cls = entrypoint.get_async_dialect_cls(u)
    else:
        dialect_cls = entrypoint.get_dialect_cls(u)

    if kwargs.pop("_coerce_config", False):

        def pop_kwarg(key: str, default: Optional[Any] = None) -> Any:
            value = kwargs.pop(key, default)
            if key in dialect_cls.engine_config_types:
                value = dialect_cls.engine_config_types[key](value)
            return value

    else:
        pop_kwarg = kwargs.pop  # type: ignore

    dialect_args = {}
    # consume dialect arguments from kwargs
    for k in util.get_cls_kwargs(dialect_cls):
        if k in kwargs:
            dialect_args[k] = pop_kwarg(k)

    dbapi = kwargs.pop("module", None)
    if dbapi is None:
        dbapi_args = {}

        if "import_dbapi" in dialect_cls.__dict__:
            dbapi_meth = dialect_cls.import_dbapi

        elif hasattr(dialect_cls, "dbapi") and inspect.ismethod(
            dialect_cls.dbapi
        ):
            util.warn_deprecated(
                "The dbapi() classmethod on dialect classes has been "
                "renamed to import_dbapi().  Implement an import_dbapi() "
                f"classmethod directly on class {dialect_cls} to remove this "
                "warning; the old .dbapi() classmethod may be maintained for "
                "backwards compatibility.",
                "2.0",
            )
            dbapi_meth = dialect_cls.dbapi
        else:
            dbapi_meth = dialect_cls.import_dbapi

        for k in util.get_func_kwargs(dbapi_meth):
            if k in kwargs:
                dbapi_args[k] = pop_kwarg(k)
        dbapi = dbapi_meth(**dbapi_args)

    dialect_args["dbapi"] = dbapi

    dialect_args.setdefault("compiler_linting", compiler.NO_LINTING)
    enable_from_linting = kwargs.pop("enable_from_linting", True)
    if enable_from_linting:
        dialect_args["compiler_linting"] ^= compiler.COLLECT_CARTESIAN_PRODUCTS

    for plugin in plugins:
        plugin.handle_dialect_kwargs(dialect_cls, dialect_args)

    # create dialect
    dialect = dialect_cls(**dialect_args)

    # assemble connection arguments
    (cargs_tup, cparams) = dialect.create_connect_args(u)
    cparams.update(pop_kwarg("connect_args", {}))

    if "async_fallback" in cparams and util.asbool(cparams["async_fallback"]):
        util.warn_deprecated(
            "The async_fallback dialect argument is deprecated and will be "
            "removed in SQLAlchemy 2.1.",
            "2.0",
        )

    cargs = list(cargs_tup)  # allow mutability

    # look for existing pool or create
    pool = pop_kwarg("pool", None)
    if pool is None:

        def connect(
            connection_record: Optional[ConnectionPoolEntry] = None,
        ) -> DBAPIConnection:
            if dialect._has_events:
                for fn in dialect.dispatch.do_connect:
                    connection = cast(
                        DBAPIConnection,
                        fn(dialect, connection_record, cargs, cparams),
                    )
                    if connection is not None:
                        return connection

            return dialect.connect(*cargs, **cparams)

        creator = pop_kwarg("creator", connect)

        poolclass = pop_kwarg("poolclass", None)
        if poolclass is None:
            poolclass = dialect.get_dialect_pool_class(u)
        pool_args = {"dialect": dialect}

        # consume pool arguments from kwargs, translating a few of
        # the arguments
        for k in util.get_cls_kwargs(poolclass):
            tk = _pool_translate_kwargs.get(k, k)
            if tk in kwargs:
                pool_args[k] = pop_kwarg(tk)

        for plugin in plugins:
            plugin.handle_pool_kwargs(poolclass, pool_args)

        pool = poolclass(creator, **pool_args)
    else:
        pool._dialect = dialect

    # create engine.
    if not pop_kwarg("future", True):
        raise exc.ArgumentError(
            "The 'future' parameter passed to "
            "create_engine() may only be set to True."
        )

    engineclass = base.Engine

    engine_args = {}
    for k in util.get_cls_kwargs(engineclass):
        if k in kwargs:
            engine_args[k] = pop_kwarg(k)

    # internal flags used by the test suite for instrumenting / proxying
    # engines with mocks etc.
    _initialize = kwargs.pop("_initialize", True)

    # all kwargs should be consumed
    if kwargs:
        raise TypeError(
            "Invalid argument(s) %s sent to create_engine(), "
            "using configuration %s/%s/%s.  Please check that the "
            "keyword arguments are appropriate for this combination "
            "of components."
            % (
                ",".join("'%s'" % k for k in kwargs),
                dialect.__class__.__name__,
                pool.__class__.__name__,
                engineclass.__name__,
            )
        )

    engine = engineclass(pool, dialect, u, **engine_args)

    if _initialize:
        do_on_connect = dialect.on_connect_url(u)
        if do_on_connect:

            def on_connect(
                dbapi_connection: DBAPIConnection,
                connection_record: ConnectionPoolEntry,
            ) -> None:
                assert do_on_connect is not None
                do_on_connect(dbapi_connection)

            event.listen(pool, "connect", on_connect)

        builtin_on_connect = dialect._builtin_onconnect()
        if builtin_on_connect:
            event.listen(pool, "connect", builtin_on_connect)

        def first_connect(
            dbapi_connection: DBAPIConnection,
            connection_record: ConnectionPoolEntry,
        ) -> None:
            c = base.Connection(
                engine,
                connection=_AdhocProxiedConnection(
                    dbapi_connection, connection_record
                ),
                _has_events=False,
                # reconnecting will be a reentrant condition, so if the
                # connection goes away, Connection is then closed
                _allow_revalidate=False,
                # dont trigger the autobegin sequence
                # within the up front dialect checks
                _allow_autobegin=False,
            )
            c._execution_options = util.EMPTY_DICT

            try:
                dialect.initialize(c)
            finally:
                # note that "invalidated" and "closed" are mutually
                # exclusive in 1.4 Connection.
                if not c.invalidated and not c.closed:
                    # transaction is rolled back otherwise, tested by
                    # test/dialect/postgresql/test_dialect.py
                    # ::MiscBackendTest::test_initial_transaction_state
                    dialect.do_rollback(c.connection)

        # previously, the "first_connect" event was used here, which was then
        # scaled back if the "on_connect" handler were present.  now,
        # since "on_connect" is virtually always present, just use
        # "connect" event with once_unless_exception in all cases so that
        # the connection event flow is consistent in all cases.
        event.listen(
            pool, "connect", first_connect, _once_unless_exception=True
        )

    dialect_cls.engine_created(engine)
    if entrypoint is not dialect_cls:
        entrypoint.engine_created(engine)

    for plugin in plugins:
        plugin.engine_created(engine)

    return engine


def engine_from_config(
    configuration: Dict[str, Any], prefix: str = "sqlalchemy.", **kwargs: Any
) -> Engine:
    """Create a new Engine instance using a configuration dictionary.

    The dictionary is typically produced from a config file.

    The keys of interest to ``engine_from_config()`` should be prefixed, e.g.
    ``sqlalchemy.url``, ``sqlalchemy.echo``, etc.  The 'prefix' argument
    indicates the prefix to be searched for.  Each matching key (after the
    prefix is stripped) is treated as though it were the corresponding keyword
    argument to a :func:`_sa.create_engine` call.

    The only required key is (assuming the default prefix) ``sqlalchemy.url``,
    which provides the :ref:`database URL <database_urls>`.

    A select set of keyword arguments will be "coerced" to their
    expected type based on string values.  The set of arguments
    is extensible per-dialect using the ``engine_config_types`` accessor.

    :param configuration: A dictionary (typically produced from a config file,
        but this is not a requirement).  Items whose keys start with the value
        of 'prefix' will have that prefix stripped, and will then be passed to
        :func:`_sa.create_engine`.

    :param prefix: Prefix to match and then strip from keys
        in 'configuration'.

    :param kwargs: Each keyword argument to ``engine_from_config()`` itself
        overrides the corresponding item taken from the 'configuration'
        dictionary.  Keyword arguments should *not* be prefixed.

    """

    options = {
        key[len(prefix) :]: configuration[key]
        for key in configuration
        if key.startswith(prefix)
    }
    options["_coerce_config"] = True
    options.update(kwargs)
    url = options.pop("url")
    return create_engine(url, **options)


@overload
def create_pool_from_url(
    url: Union[str, URL],
    *,
    poolclass: Optional[Type[Pool]] = ...,
    logging_name: str = ...,
    pre_ping: bool = ...,
    size: int = ...,
    recycle: int = ...,
    reset_on_return: Optional[_ResetStyleArgType] = ...,
    timeout: float = ...,
    use_lifo: bool = ...,
    **kwargs: Any,
) -> Pool: ...


@overload
def create_pool_from_url(url: Union[str, URL], **kwargs: Any) -> Pool: ...


def create_pool_from_url(url: Union[str, URL], **kwargs: Any) -> Pool:
    """Create a pool instance from the given url.

    If ``poolclass`` is not provided the pool class used
    is selected using the dialect specified in the URL.

    The arguments passed to :func:`_sa.create_pool_from_url` are
    identical to the pool argument passed to the :func:`_sa.create_engine`
    function.

    .. versionadded:: 2.0.10
    """

    for key in _pool_translate_kwargs:
        if key in kwargs:
            kwargs[_pool_translate_kwargs[key]] = kwargs.pop(key)

    engine = create_engine(url, **kwargs, _initialize=False)
    return engine.pool


_pool_translate_kwargs = immutabledict(
    {
        "logging_name": "pool_logging_name",
        "echo": "echo_pool",
        "timeout": "pool_timeout",
        "recycle": "pool_recycle",
        "events": "pool_events",  # deprecated
        "reset_on_return": "pool_reset_on_return",
        "pre_ping": "pool_pre_ping",
        "use_lifo": "pool_use_lifo",
    }
)
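A configuration-driven sketch of ``engine_from_config()`` as documented above (illustrative only; the config keys and values are assumptions, not part of this commit). String values are coerced through the dialect's ``engine_config_types`` mapping because ``_coerce_config`` is set internally:

    from sqlalchemy import engine_from_config

    config = {
        "sqlalchemy.url": "sqlite:///example.db",
        "sqlalchemy.echo": "true",           # string coerced to a boolean
        "sqlalchemy.pool_recycle": "3600",   # string coerced to an integer
    }
    engine = engine_from_config(config, prefix="sqlalchemy.")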
2150  elitebot/lib/python3.11/site-packages/sqlalchemy/engine/cursor.py  (Normal file)
File diff suppressed because it is too large

2339  elitebot/lib/python3.11/site-packages/sqlalchemy/engine/default.py  (Normal file)
File diff suppressed because it is too large
@@ -0,0 +1,951 @@
# engine/events.py
# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php


from __future__ import annotations

import typing
from typing import Any
from typing import Dict
from typing import Optional
from typing import Tuple
from typing import Type
from typing import Union

from .base import Connection
from .base import Engine
from .interfaces import ConnectionEventsTarget
from .interfaces import DBAPIConnection
from .interfaces import DBAPICursor
from .interfaces import Dialect
from .. import event
from .. import exc
from ..util.typing import Literal

if typing.TYPE_CHECKING:
    from .interfaces import _CoreMultiExecuteParams
    from .interfaces import _CoreSingleExecuteParams
    from .interfaces import _DBAPIAnyExecuteParams
    from .interfaces import _DBAPIMultiExecuteParams
    from .interfaces import _DBAPISingleExecuteParams
    from .interfaces import _ExecuteOptions
    from .interfaces import ExceptionContext
    from .interfaces import ExecutionContext
    from .result import Result
    from ..pool import ConnectionPoolEntry
    from ..sql import Executable
    from ..sql.elements import BindParameter


class ConnectionEvents(event.Events[ConnectionEventsTarget]):
    """Available events for
    :class:`_engine.Connection` and :class:`_engine.Engine`.

    The methods here define the name of an event as well as the names of
    members that are passed to listener functions.

    An event listener can be associated with any
    :class:`_engine.Connection` or :class:`_engine.Engine`
    class or instance, such as an :class:`_engine.Engine`, e.g.::

        from sqlalchemy import event, create_engine

        def before_cursor_execute(conn, cursor, statement, parameters, context,
                                  executemany):
            log.info("Received statement: %s", statement)

        engine = create_engine('postgresql+psycopg2://scott:tiger@localhost/test')
        event.listen(engine, "before_cursor_execute", before_cursor_execute)

    or with a specific :class:`_engine.Connection`::

        with engine.begin() as conn:
            @event.listens_for(conn, 'before_cursor_execute')
            def before_cursor_execute(conn, cursor, statement, parameters,
                                      context, executemany):
                log.info("Received statement: %s", statement)

    When the methods are called with a `statement` parameter, such as in
    :meth:`.after_cursor_execute` or :meth:`.before_cursor_execute`,
    the statement is the exact SQL string that was prepared for transmission
    to the DBAPI ``cursor`` in the connection's :class:`.Dialect`.

    The :meth:`.before_execute` and :meth:`.before_cursor_execute`
    events can also be established with the ``retval=True`` flag, which
    allows modification of the statement and parameters to be sent
    to the database.  The :meth:`.before_cursor_execute` event is
    particularly useful here to add ad-hoc string transformations, such
    as comments, to all executions::

        from sqlalchemy.engine import Engine
        from sqlalchemy import event

        @event.listens_for(Engine, "before_cursor_execute", retval=True)
        def comment_sql_calls(conn, cursor, statement, parameters,
                              context, executemany):
            statement = statement + " -- some comment"
            return statement, parameters

    .. note:: :class:`_events.ConnectionEvents` can be established on any
       combination of :class:`_engine.Engine`, :class:`_engine.Connection`,
       as well
       as instances of each of those classes.  Events across all
       four scopes will fire off for a given instance of
       :class:`_engine.Connection`.  However, for performance reasons, the
       :class:`_engine.Connection` object determines at instantiation time
       whether or not its parent :class:`_engine.Engine` has event listeners
       established.  Event listeners added to the :class:`_engine.Engine`
       class or to an instance of :class:`_engine.Engine`
       *after* the instantiation
       of a dependent :class:`_engine.Connection` instance will usually
       *not* be available on that :class:`_engine.Connection` instance.
       The newly
       added listeners will instead take effect for
       :class:`_engine.Connection`
       instances created subsequent to those event listeners being
       established on the parent :class:`_engine.Engine` class or instance.

    :param retval=False: Applies to the :meth:`.before_execute` and
       :meth:`.before_cursor_execute` events only.  When True, the
       user-defined event function must have a return value, which
       is a tuple of parameters that replace the given statement
       and parameters.  See those methods for a description of
       specific return arguments.

    """  # noqa

    _target_class_doc = "SomeEngine"
    _dispatch_target = ConnectionEventsTarget

    @classmethod
    def _accept_with(
        cls,
        target: Union[ConnectionEventsTarget, Type[ConnectionEventsTarget]],
        identifier: str,
    ) -> Optional[Union[ConnectionEventsTarget, Type[ConnectionEventsTarget]]]:
        default_dispatch = super()._accept_with(target, identifier)
        if default_dispatch is None and hasattr(
            target, "_no_async_engine_events"
        ):
            target._no_async_engine_events()

        return default_dispatch

    @classmethod
    def _listen(
        cls,
        event_key: event._EventKey[ConnectionEventsTarget],
        *,
        retval: bool = False,
        **kw: Any,
    ) -> None:
        target, identifier, fn = (
            event_key.dispatch_target,
            event_key.identifier,
            event_key._listen_fn,
        )
        target._has_events = True

        if not retval:
            if identifier == "before_execute":
                orig_fn = fn

                def wrap_before_execute(  # type: ignore
                    conn, clauseelement, multiparams, params, execution_options
                ):
                    orig_fn(
                        conn,
                        clauseelement,
                        multiparams,
                        params,
                        execution_options,
                    )
                    return clauseelement, multiparams, params

                fn = wrap_before_execute
            elif identifier == "before_cursor_execute":
                orig_fn = fn

                def wrap_before_cursor_execute(  # type: ignore
                    conn, cursor, statement, parameters, context, executemany
                ):
                    orig_fn(
                        conn,
                        cursor,
                        statement,
                        parameters,
                        context,
                        executemany,
                    )
                    return statement, parameters

                fn = wrap_before_cursor_execute
        elif retval and identifier not in (
            "before_execute",
            "before_cursor_execute",
        ):
            raise exc.ArgumentError(
                "Only the 'before_execute', "
                "'before_cursor_execute' and 'handle_error' engine "
                "event listeners accept the 'retval=True' "
                "argument."
            )
        event_key.with_wrapper(fn).base_listen()

    @event._legacy_signature(
        "1.4",
        ["conn", "clauseelement", "multiparams", "params"],
        lambda conn, clauseelement, multiparams, params, execution_options: (
            conn,
            clauseelement,
            multiparams,
            params,
        ),
    )
    def before_execute(
        self,
        conn: Connection,
        clauseelement: Executable,
        multiparams: _CoreMultiExecuteParams,
        params: _CoreSingleExecuteParams,
        execution_options: _ExecuteOptions,
    ) -> Optional[
        Tuple[Executable, _CoreMultiExecuteParams, _CoreSingleExecuteParams]
    ]:
        """Intercept high level execute() events, receiving uncompiled
        SQL constructs and other objects prior to rendering into SQL.

        This event is good for debugging SQL compilation issues as well
        as early manipulation of the parameters being sent to the database,
        as the parameter lists will be in a consistent format here.

        This event can be optionally established with the ``retval=True``
        flag.  The ``clauseelement``, ``multiparams``, and ``params``
        arguments should be returned as a three-tuple in this case::

            @event.listens_for(Engine, "before_execute", retval=True)
            def before_execute(conn, clauseelement, multiparams, params):
                # do something with clauseelement, multiparams, params
                return clauseelement, multiparams, params

        :param conn: :class:`_engine.Connection` object
|
||||
:param clauseelement: SQL expression construct, :class:`.Compiled`
|
||||
instance, or string statement passed to
|
||||
:meth:`_engine.Connection.execute`.
|
||||
:param multiparams: Multiple parameter sets, a list of dictionaries.
|
||||
:param params: Single parameter set, a single dictionary.
|
||||
:param execution_options: dictionary of execution
|
||||
options passed along with the statement, if any. This is a merge
|
||||
of all options that will be used, including those of the statement,
|
||||
the connection, and those passed in to the method itself for
|
||||
the 2.0 style of execution.
|
||||
|
||||
.. versionadded:: 1.4
|
||||
|
||||
.. seealso::
|
||||
|
||||
:meth:`.before_cursor_execute`
|
||||
|
||||
"""
|
||||
|
||||
@event._legacy_signature(
|
||||
"1.4",
|
||||
["conn", "clauseelement", "multiparams", "params", "result"],
|
||||
lambda conn, clauseelement, multiparams, params, execution_options, result: ( # noqa
|
||||
conn,
|
||||
clauseelement,
|
||||
multiparams,
|
||||
params,
|
||||
result,
|
||||
),
|
||||
)
|
||||
def after_execute(
|
||||
self,
|
||||
conn: Connection,
|
||||
clauseelement: Executable,
|
||||
multiparams: _CoreMultiExecuteParams,
|
||||
params: _CoreSingleExecuteParams,
|
||||
execution_options: _ExecuteOptions,
|
||||
result: Result[Any],
|
||||
) -> None:
|
||||
"""Intercept high level execute() events after execute.
|
||||
|
||||
|
||||
:param conn: :class:`_engine.Connection` object
|
||||
:param clauseelement: SQL expression construct, :class:`.Compiled`
|
||||
instance, or string statement passed to
|
||||
:meth:`_engine.Connection.execute`.
|
||||
:param multiparams: Multiple parameter sets, a list of dictionaries.
|
||||
:param params: Single parameter set, a single dictionary.
|
||||
:param execution_options: dictionary of execution
|
||||
options passed along with the statement, if any. This is a merge
|
||||
of all options that will be used, including those of the statement,
|
||||
the connection, and those passed in to the method itself for
|
||||
the 2.0 style of execution.
|
||||
|
||||
.. versionadded:: 1.4
|
||||
|
||||
:param result: :class:`_engine.CursorResult` generated by the
|
||||
execution.
|
||||
|
||||
"""
|
||||
|
||||
def before_cursor_execute(
|
||||
self,
|
||||
conn: Connection,
|
||||
cursor: DBAPICursor,
|
||||
statement: str,
|
||||
parameters: _DBAPIAnyExecuteParams,
|
||||
context: Optional[ExecutionContext],
|
||||
executemany: bool,
|
||||
) -> Optional[Tuple[str, _DBAPIAnyExecuteParams]]:
|
||||
"""Intercept low-level cursor execute() events before execution,
|
||||
receiving the string SQL statement and DBAPI-specific parameter list to
|
||||
be invoked against a cursor.
|
||||
|
||||
This event is a good choice for logging as well as late modifications
|
||||
to the SQL string. It's less ideal for parameter modifications except
|
||||
for those which are specific to a target backend.
|
||||
|
||||
This event can be optionally established with the ``retval=True``
|
||||
flag. The ``statement`` and ``parameters`` arguments should be
|
||||
returned as a two-tuple in this case::
|
||||
|
||||
@event.listens_for(Engine, "before_cursor_execute", retval=True)
|
||||
def before_cursor_execute(conn, cursor, statement,
|
||||
parameters, context, executemany):
|
||||
# do something with statement, parameters
|
||||
return statement, parameters
|
||||
|
||||
See the example at :class:`_events.ConnectionEvents`.
|
||||
|
||||
:param conn: :class:`_engine.Connection` object
|
||||
:param cursor: DBAPI cursor object
|
||||
:param statement: string SQL statement, as to be passed to the DBAPI
|
||||
:param parameters: Dictionary, tuple, or list of parameters being
|
||||
passed to the ``execute()`` or ``executemany()`` method of the
|
||||
DBAPI ``cursor``. In some cases may be ``None``.
|
||||
:param context: :class:`.ExecutionContext` object in use. May
|
||||
be ``None``.
|
||||
:param executemany: boolean, if ``True``, this is an ``executemany()``
|
||||
call, if ``False``, this is an ``execute()`` call.
|
||||
|
||||
.. seealso::
|
||||
|
||||
:meth:`.before_execute`
|
||||
|
||||
:meth:`.after_cursor_execute`
|
||||
|
||||
"""
|
||||
|
||||
def after_cursor_execute(
|
||||
self,
|
||||
conn: Connection,
|
||||
cursor: DBAPICursor,
|
||||
statement: str,
|
||||
parameters: _DBAPIAnyExecuteParams,
|
||||
context: Optional[ExecutionContext],
|
||||
executemany: bool,
|
||||
) -> None:
|
||||
"""Intercept low-level cursor execute() events after execution.
|
||||
|
||||
:param conn: :class:`_engine.Connection` object
|
||||
:param cursor: DBAPI cursor object. Will have results pending
|
||||
if the statement was a SELECT, but these should not be consumed
|
||||
as they will be needed by the :class:`_engine.CursorResult`.
|
||||
:param statement: string SQL statement, as passed to the DBAPI
|
||||
:param parameters: Dictionary, tuple, or list of parameters being
|
||||
passed to the ``execute()`` or ``executemany()`` method of the
|
||||
DBAPI ``cursor``. In some cases may be ``None``.
|
||||
:param context: :class:`.ExecutionContext` object in use. May
|
||||
be ``None``.
|
||||
:param executemany: boolean, if ``True``, this is an ``executemany()``
|
||||
call, if ``False``, this is an ``execute()`` call.
|
||||
|
||||
"""
|
||||
|
||||
@event._legacy_signature(
|
||||
"2.0", ["conn", "branch"], converter=lambda conn: (conn, False)
|
||||
)
|
||||
def engine_connect(self, conn: Connection) -> None:
|
||||
"""Intercept the creation of a new :class:`_engine.Connection`.
|
||||
|
||||
This event is called typically as the direct result of calling
|
||||
the :meth:`_engine.Engine.connect` method.
|
||||
|
||||
It differs from the :meth:`_events.PoolEvents.connect` method, which
|
||||
refers to the actual connection to a database at the DBAPI level;
|
||||
a DBAPI connection may be pooled and reused for many operations.
|
||||
In contrast, this event refers only to the production of a higher level
|
||||
:class:`_engine.Connection` wrapper around such a DBAPI connection.
|
||||
|
||||
It also differs from the :meth:`_events.PoolEvents.checkout` event
|
||||
in that it is specific to the :class:`_engine.Connection` object,
|
||||
not the
|
||||
DBAPI connection that :meth:`_events.PoolEvents.checkout` deals with,
|
||||
although
|
||||
this DBAPI connection is available here via the
|
||||
:attr:`_engine.Connection.connection` attribute.
|
||||
But note there can in fact
|
||||
be multiple :meth:`_events.PoolEvents.checkout`
|
||||
events within the lifespan
|
||||
of a single :class:`_engine.Connection` object, if that
|
||||
:class:`_engine.Connection`
|
||||
is invalidated and re-established.
|
||||
|
||||
:param conn: :class:`_engine.Connection` object.
|
||||
|
||||
.. seealso::
|
||||
|
||||
:meth:`_events.PoolEvents.checkout`
|
||||
the lower-level pool checkout event
|
||||
for an individual DBAPI connection
|
||||
|
||||
"""
|
||||
|
||||
def set_connection_execution_options(
|
||||
self, conn: Connection, opts: Dict[str, Any]
|
||||
) -> None:
|
||||
"""Intercept when the :meth:`_engine.Connection.execution_options`
|
||||
method is called.
|
||||
|
||||
This method is called after the new :class:`_engine.Connection`
|
||||
has been
|
||||
produced, with the newly updated execution options collection, but
|
||||
before the :class:`.Dialect` has acted upon any of those new options.
|
||||
|
||||
Note that this method is not called when a new
|
||||
:class:`_engine.Connection`
|
||||
is produced which is inheriting execution options from its parent
|
||||
:class:`_engine.Engine`; to intercept this condition, use the
|
||||
:meth:`_events.ConnectionEvents.engine_connect` event.
|
||||
|
||||
:param conn: The newly copied :class:`_engine.Connection` object
|
||||
|
||||
:param opts: dictionary of options that were passed to the
|
||||
:meth:`_engine.Connection.execution_options` method.
|
||||
This dictionary may be modified in place to affect the ultimate
|
||||
options which take effect.
|
||||
|
||||
.. versionadded:: 2.0 the ``opts`` dictionary may be modified
|
||||
in place.
|
||||
|
||||
|
||||
.. seealso::
|
||||
|
||||
:meth:`_events.ConnectionEvents.set_engine_execution_options`
|
||||
- event
|
||||
which is called when :meth:`_engine.Engine.execution_options`
|
||||
is called.
|
||||
|
||||
|
||||
"""
|
||||
|
||||
def set_engine_execution_options(
|
||||
self, engine: Engine, opts: Dict[str, Any]
|
||||
) -> None:
|
||||
"""Intercept when the :meth:`_engine.Engine.execution_options`
|
||||
method is called.
|
||||
|
||||
The :meth:`_engine.Engine.execution_options` method produces a shallow
|
||||
copy of the :class:`_engine.Engine` which stores the new options.
|
||||
That new
|
||||
:class:`_engine.Engine` is passed here.
|
||||
A particular application of this
|
||||
method is to add a :meth:`_events.ConnectionEvents.engine_connect`
|
||||
event
|
||||
handler to the given :class:`_engine.Engine`
|
||||
which will perform some per-
|
||||
:class:`_engine.Connection` task specific to these execution options.
|
||||
|
||||
:param engine: The newly copied :class:`_engine.Engine` object
|
||||
|
||||
:param opts: dictionary of options that were passed to the
|
||||
:meth:`_engine.Engine.execution_options` method.
|
||||
This dictionary may be modified in place to affect the ultimate
|
||||
options which take effect.
|
||||
|
||||
.. versionadded:: 2.0 the ``opts`` dictionary may be modified
|
||||
in place.
|
||||
|
||||
.. seealso::
|
||||
|
||||
:meth:`_events.ConnectionEvents.set_connection_execution_options`
|
||||
- event
|
||||
which is called when :meth:`_engine.Connection.execution_options`
|
||||
is
|
||||
called.
|
||||
|
||||
"""
|
||||
|
||||
def engine_disposed(self, engine: Engine) -> None:
|
||||
"""Intercept when the :meth:`_engine.Engine.dispose` method is called.
|
||||
|
||||
The :meth:`_engine.Engine.dispose` method instructs the engine to
|
||||
"dispose" of it's connection pool (e.g. :class:`_pool.Pool`), and
|
||||
replaces it with a new one. Disposing of the old pool has the
|
||||
effect that existing checked-in connections are closed. The new
|
||||
pool does not establish any new connections until it is first used.
|
||||
|
||||
This event can be used to indicate that resources related to the
|
||||
:class:`_engine.Engine` should also be cleaned up,
|
||||
keeping in mind that the
|
||||
:class:`_engine.Engine`
|
||||
can still be used for new requests in which case
|
||||
it re-acquires connection resources.
|
||||
|
||||
"""
|
||||
|
||||
def begin(self, conn: Connection) -> None:
|
||||
"""Intercept begin() events.
|
||||
|
||||
:param conn: :class:`_engine.Connection` object
|
||||
|
||||
"""
|
||||
|
||||
def rollback(self, conn: Connection) -> None:
|
||||
"""Intercept rollback() events, as initiated by a
|
||||
:class:`.Transaction`.
|
||||
|
||||
Note that the :class:`_pool.Pool` also "auto-rolls back"
|
||||
a DBAPI connection upon checkin, if the ``reset_on_return``
|
||||
flag is set to its default value of ``'rollback'``.
|
||||
To intercept this
|
||||
rollback, use the :meth:`_events.PoolEvents.reset` hook.
|
||||
|
||||
:param conn: :class:`_engine.Connection` object
|
||||
|
||||
.. seealso::
|
||||
|
||||
:meth:`_events.PoolEvents.reset`
|
||||
|
||||
"""
|
||||
|
||||
def commit(self, conn: Connection) -> None:
|
||||
"""Intercept commit() events, as initiated by a
|
||||
:class:`.Transaction`.
|
||||
|
||||
Note that the :class:`_pool.Pool` may also "auto-commit"
|
||||
a DBAPI connection upon checkin, if the ``reset_on_return``
|
||||
flag is set to the value ``'commit'``. To intercept this
|
||||
commit, use the :meth:`_events.PoolEvents.reset` hook.
|
||||
|
||||
:param conn: :class:`_engine.Connection` object
|
||||
"""
|
||||
|
||||
def savepoint(self, conn: Connection, name: str) -> None:
|
||||
"""Intercept savepoint() events.
|
||||
|
||||
:param conn: :class:`_engine.Connection` object
|
||||
:param name: specified name used for the savepoint.
|
||||
|
||||
"""
|
||||
|
||||
def rollback_savepoint(
|
||||
self, conn: Connection, name: str, context: None
|
||||
) -> None:
|
||||
"""Intercept rollback_savepoint() events.
|
||||
|
||||
:param conn: :class:`_engine.Connection` object
|
||||
:param name: specified name used for the savepoint.
|
||||
:param context: not used
|
||||
|
||||
"""
|
||||
# TODO: deprecate "context"
|
||||
|
||||
def release_savepoint(
|
||||
self, conn: Connection, name: str, context: None
|
||||
) -> None:
|
||||
"""Intercept release_savepoint() events.
|
||||
|
||||
:param conn: :class:`_engine.Connection` object
|
||||
:param name: specified name used for the savepoint.
|
||||
:param context: not used
|
||||
|
||||
"""
|
||||
# TODO: deprecate "context"
|
||||
|
||||
def begin_twophase(self, conn: Connection, xid: Any) -> None:
|
||||
"""Intercept begin_twophase() events.
|
||||
|
||||
:param conn: :class:`_engine.Connection` object
|
||||
:param xid: two-phase XID identifier
|
||||
|
||||
"""
|
||||
|
||||
def prepare_twophase(self, conn: Connection, xid: Any) -> None:
|
||||
"""Intercept prepare_twophase() events.
|
||||
|
||||
:param conn: :class:`_engine.Connection` object
|
||||
:param xid: two-phase XID identifier
|
||||
"""
|
||||
|
||||
def rollback_twophase(
|
||||
self, conn: Connection, xid: Any, is_prepared: bool
|
||||
) -> None:
|
||||
"""Intercept rollback_twophase() events.
|
||||
|
||||
:param conn: :class:`_engine.Connection` object
|
||||
:param xid: two-phase XID identifier
|
||||
:param is_prepared: boolean, indicates if
|
||||
:meth:`.TwoPhaseTransaction.prepare` was called.
|
||||
|
||||
"""
|
||||
|
||||
def commit_twophase(
|
||||
self, conn: Connection, xid: Any, is_prepared: bool
|
||||
) -> None:
|
||||
"""Intercept commit_twophase() events.
|
||||
|
||||
:param conn: :class:`_engine.Connection` object
|
||||
:param xid: two-phase XID identifier
|
||||
:param is_prepared: boolean, indicates if
|
||||
:meth:`.TwoPhaseTransaction.prepare` was called.
|
||||
|
||||
"""
|
||||
|
||||
|
||||
class DialectEvents(event.Events[Dialect]):
|
||||
"""event interface for execution-replacement functions.
|
||||
|
||||
These events allow direct instrumentation and replacement
|
||||
of key dialect functions which interact with the DBAPI.
|
||||
|
||||
.. note::
|
||||
|
||||
:class:`.DialectEvents` hooks should be considered **semi-public**
|
||||
and experimental.
|
||||
These hooks are not for general use and are only for those situations
|
||||
where intricate re-statement of DBAPI mechanics must be injected onto
|
||||
an existing dialect. For general-use statement-interception events,
|
||||
please use the :class:`_events.ConnectionEvents` interface.
|
||||
|
||||
.. seealso::
|
||||
|
||||
:meth:`_events.ConnectionEvents.before_cursor_execute`
|
||||
|
||||
:meth:`_events.ConnectionEvents.before_execute`
|
||||
|
||||
:meth:`_events.ConnectionEvents.after_cursor_execute`
|
||||
|
||||
:meth:`_events.ConnectionEvents.after_execute`
|
||||
|
||||
"""
|
||||
|
||||
_target_class_doc = "SomeEngine"
|
||||
_dispatch_target = Dialect
|
||||
|
||||
@classmethod
|
||||
def _listen(
|
||||
cls,
|
||||
event_key: event._EventKey[Dialect],
|
||||
*,
|
||||
retval: bool = False,
|
||||
**kw: Any,
|
||||
) -> None:
|
||||
target = event_key.dispatch_target
|
||||
|
||||
target._has_events = True
|
||||
event_key.base_listen()
|
||||
|
||||
@classmethod
|
||||
def _accept_with(
|
||||
cls,
|
||||
target: Union[Engine, Type[Engine], Dialect, Type[Dialect]],
|
||||
identifier: str,
|
||||
) -> Optional[Union[Dialect, Type[Dialect]]]:
|
||||
if isinstance(target, type):
|
||||
if issubclass(target, Engine):
|
||||
return Dialect
|
||||
elif issubclass(target, Dialect):
|
||||
return target
|
||||
elif isinstance(target, Engine):
|
||||
return target.dialect
|
||||
elif isinstance(target, Dialect):
|
||||
return target
|
||||
elif isinstance(target, Connection) and identifier == "handle_error":
|
||||
raise exc.InvalidRequestError(
|
||||
"The handle_error() event hook as of SQLAlchemy 2.0 is "
|
||||
"established on the Dialect, and may only be applied to the "
|
||||
"Engine as a whole or to a specific Dialect as a whole, "
|
||||
"not on a per-Connection basis."
|
||||
)
|
||||
elif hasattr(target, "_no_async_engine_events"):
|
||||
target._no_async_engine_events()
|
||||
else:
|
||||
return None
|
||||
|
||||
def handle_error(
|
||||
self, exception_context: ExceptionContext
|
||||
) -> Optional[BaseException]:
|
||||
r"""Intercept all exceptions processed by the
|
||||
:class:`_engine.Dialect`, typically but not limited to those
|
||||
emitted within the scope of a :class:`_engine.Connection`.
|
||||
|
||||
.. versionchanged:: 2.0 the :meth:`.DialectEvents.handle_error` event
|
||||
is moved to the :class:`.DialectEvents` class, moved from the
|
||||
:class:`.ConnectionEvents` class, so that it may also participate in
|
||||
the "pre ping" operation configured with the
|
||||
:paramref:`_sa.create_engine.pool_pre_ping` parameter. The event
|
||||
remains registered by using the :class:`_engine.Engine` as the event
|
||||
target, however note that using the :class:`_engine.Connection` as
|
||||
an event target for :meth:`.DialectEvents.handle_error` is no longer
|
||||
supported.
|
||||
|
||||
This includes all exceptions emitted by the DBAPI as well as
|
||||
within SQLAlchemy's statement invocation process, including
|
||||
encoding errors and other statement validation errors. Other areas
|
||||
in which the event is invoked include transaction begin and end,
|
||||
result row fetching, and cursor creation.
|
||||
|
||||
Note that :meth:`.handle_error` may support new kinds of exceptions
|
||||
and new calling scenarios at *any time*. Code which uses this
|
||||
event must expect new calling patterns to be present in minor
|
||||
releases.
|
||||
|
||||
To support the wide variety of members that correspond to an exception,
|
||||
as well as to allow extensibility of the event without backwards
|
||||
incompatibility, the sole argument received is an instance of
|
||||
:class:`.ExceptionContext`. This object contains data members
|
||||
representing detail about the exception.
|
||||
|
||||
Use cases supported by this hook include:
|
||||
|
||||
* read-only, low-level exception handling for logging and
|
||||
debugging purposes
|
||||
* Establishing whether a DBAPI connection error message indicates
|
||||
that the database connection needs to be reconnected, including
|
||||
for the "pre_ping" handler used by **some** dialects
|
||||
* Establishing or disabling whether a connection or the owning
|
||||
connection pool is invalidated or expired in response to a
|
||||
specific exception
|
||||
* exception re-writing
|
||||
|
||||
The hook is called while the cursor from the failed operation
|
||||
(if any) is still open and accessible. Special cleanup operations
|
||||
can be called on this cursor; SQLAlchemy will attempt to close
|
||||
this cursor subsequent to this hook being invoked.
|
||||
|
||||
As of SQLAlchemy 2.0, the "pre_ping" handler enabled using the
|
||||
:paramref:`_sa.create_engine.pool_pre_ping` parameter will also
|
||||
participate in the :meth:`.handle_error` process, **for those dialects
|
||||
that rely upon disconnect codes to detect database liveness**. Note
|
||||
that some dialects such as psycopg, psycopg2, and most MySQL dialects
|
||||
make use of a native ``ping()`` method supplied by the DBAPI which does
|
||||
not make use of disconnect codes.
|
||||
|
||||
.. versionchanged:: 2.0.0 The :meth:`.DialectEvents.handle_error`
|
||||
event hook participates in connection pool "pre-ping" operations.
|
||||
Within this usage, the :attr:`.ExceptionContext.engine` attribute
|
||||
will be ``None``, however the :class:`.Dialect` in use is always
|
||||
available via the :attr:`.ExceptionContext.dialect` attribute.
|
||||
|
||||
.. versionchanged:: 2.0.5 Added :attr:`.ExceptionContext.is_pre_ping`
|
||||
attribute which will be set to ``True`` when the
|
||||
:meth:`.DialectEvents.handle_error` event hook is triggered within
|
||||
a connection pool pre-ping operation.
|
||||
|
||||
.. versionchanged:: 2.0.5 An issue was repaired that allows for the
|
||||
PostgreSQL ``psycopg`` and ``psycopg2`` drivers, as well as all
|
||||
MySQL drivers, to properly participate in the
|
||||
:meth:`.DialectEvents.handle_error` event hook during
|
||||
connection pool "pre-ping" operations; previously, the
|
||||
implementation was non-working for these drivers.
|
||||
|
||||
|
||||
A handler function has two options for replacing
|
||||
the SQLAlchemy-constructed exception into one that is user
|
||||
defined. It can either raise this new exception directly, in
|
||||
which case all further event listeners are bypassed and the
|
||||
exception will be raised, after appropriate cleanup has taken
|
||||
place::
|
||||
|
||||
@event.listens_for(Engine, "handle_error")
|
||||
def handle_exception(context):
|
||||
if isinstance(context.original_exception,
|
||||
psycopg2.OperationalError) and \
|
||||
"failed" in str(context.original_exception):
|
||||
raise MySpecialException("failed operation")
|
||||
|
||||
.. warning:: Because the
|
||||
:meth:`_events.DialectEvents.handle_error`
|
||||
event specifically provides for exceptions to be re-thrown as
|
||||
the ultimate exception raised by the failed statement,
|
||||
**stack traces will be misleading** if the user-defined event
|
||||
handler itself fails and throws an unexpected exception;
|
||||
the stack trace may not illustrate the actual code line that
|
||||
failed! It is advised to code carefully here and use
|
||||
logging and/or inline debugging if unexpected exceptions are
|
||||
occurring.
|
||||
|
||||
Alternatively, a "chained" style of event handling can be
|
||||
used, by configuring the handler with the ``retval=True``
|
||||
modifier and returning the new exception instance from the
|
||||
function. In this case, event handling will continue onto the
|
||||
next handler. The "chained" exception is available using
|
||||
:attr:`.ExceptionContext.chained_exception`::
|
||||
|
||||
@event.listens_for(Engine, "handle_error", retval=True)
|
||||
def handle_exception(context):
|
||||
if context.chained_exception is not None and \
|
||||
"special" in context.chained_exception.message:
|
||||
return MySpecialException("failed",
|
||||
cause=context.chained_exception)
|
||||
|
||||
Handlers that return ``None`` may be used within the chain; when
|
||||
a handler returns ``None``, the previous exception instance,
|
||||
if any, is maintained as the current exception that is passed onto the
|
||||
next handler.
|
||||
|
||||
When a custom exception is raised or returned, SQLAlchemy raises
|
||||
this new exception as-is; it is not wrapped by any SQLAlchemy
|
||||
object. If the exception is not a subclass of
|
||||
:class:`sqlalchemy.exc.StatementError`,
|
||||
certain features may not be available; currently this includes
|
||||
the ORM's feature of adding a detail hint about "autoflush" to
|
||||
exceptions raised within the autoflush process.
|
||||
|
||||
:param context: an :class:`.ExceptionContext` object. See this
|
||||
class for details on all available members.
|
||||
|
||||
|
||||
.. seealso::
|
||||
|
||||
:ref:`pool_new_disconnect_codes`
|
||||
|
||||
"""
|
||||
|
||||
def do_connect(
|
||||
self,
|
||||
dialect: Dialect,
|
||||
conn_rec: ConnectionPoolEntry,
|
||||
cargs: Tuple[Any, ...],
|
||||
cparams: Dict[str, Any],
|
||||
) -> Optional[DBAPIConnection]:
|
||||
"""Receive connection arguments before a connection is made.
|
||||
|
||||
This event is useful in that it allows the handler to manipulate the
|
||||
cargs and/or cparams collections that control how the DBAPI
|
||||
``connect()`` function will be called. ``cargs`` will always be a
|
||||
Python list that can be mutated in-place, and ``cparams`` a Python
|
||||
dictionary that may also be mutated::
|
||||
|
||||
e = create_engine("postgresql+psycopg2://user@host/dbname")
|
||||
|
||||
@event.listens_for(e, 'do_connect')
|
||||
def receive_do_connect(dialect, conn_rec, cargs, cparams):
|
||||
cparams["password"] = "some_password"
|
||||
|
||||
The event hook may also be used to override the call to ``connect()``
|
||||
entirely, by returning a non-``None`` DBAPI connection object::
|
||||
|
||||
e = create_engine("postgresql+psycopg2://user@host/dbname")
|
||||
|
||||
@event.listens_for(e, 'do_connect')
|
||||
def receive_do_connect(dialect, conn_rec, cargs, cparams):
|
||||
return psycopg2.connect(*cargs, **cparams)
|
||||
|
||||
.. seealso::
|
||||
|
||||
:ref:`custom_dbapi_args`
|
||||
|
||||
"""
|
||||
|
||||
def do_executemany(
|
||||
self,
|
||||
cursor: DBAPICursor,
|
||||
statement: str,
|
||||
parameters: _DBAPIMultiExecuteParams,
|
||||
context: ExecutionContext,
|
||||
) -> Optional[Literal[True]]:
|
||||
"""Receive a cursor to have executemany() called.
|
||||
|
||||
Return the value True to halt further events from invoking,
|
||||
and to indicate that the cursor execution has already taken
|
||||
place within the event handler.
|
||||
|
||||
"""
|
||||
|
||||
def do_execute_no_params(
|
||||
self, cursor: DBAPICursor, statement: str, context: ExecutionContext
|
||||
) -> Optional[Literal[True]]:
|
||||
"""Receive a cursor to have execute() with no parameters called.
|
||||
|
||||
Return the value True to halt further events from invoking,
|
||||
and to indicate that the cursor execution has already taken
|
||||
place within the event handler.
|
||||
|
||||
"""
|
||||
|
||||
def do_execute(
|
||||
self,
|
||||
cursor: DBAPICursor,
|
||||
statement: str,
|
||||
parameters: _DBAPISingleExecuteParams,
|
||||
context: ExecutionContext,
|
||||
) -> Optional[Literal[True]]:
|
||||
"""Receive a cursor to have execute() called.
|
||||
|
||||
Return the value True to halt further events from invoking,
|
||||
and to indicate that the cursor execution has already taken
|
||||
place within the event handler.
|
||||
|
||||
"""
|
||||
|
||||
def do_setinputsizes(
|
||||
self,
|
||||
inputsizes: Dict[BindParameter[Any], Any],
|
||||
cursor: DBAPICursor,
|
||||
statement: str,
|
||||
parameters: _DBAPIAnyExecuteParams,
|
||||
context: ExecutionContext,
|
||||
) -> None:
|
||||
"""Receive the setinputsizes dictionary for possible modification.
|
||||
|
||||
This event is emitted in the case where the dialect makes use of the
|
||||
DBAPI ``cursor.setinputsizes()`` method which passes information about
|
||||
parameter binding for a particular statement. The given
|
||||
``inputsizes`` dictionary will contain :class:`.BindParameter` objects
|
||||
as keys, linked to DBAPI-specific type objects as values; for
|
||||
parameters that are not bound, they are added to the dictionary with
|
||||
``None`` as the value, which means the parameter will not be included
|
||||
in the ultimate setinputsizes call. The event may be used to inspect
|
||||
and/or log the datatypes that are being bound, as well as to modify the
|
||||
dictionary in place. Parameters can be added, modified, or removed
|
||||
from this dictionary. Callers will typically want to inspect the
|
||||
:attr:`.BindParameter.type` attribute of the given bind objects in
|
||||
order to make decisions about the DBAPI object.
|
||||
|
||||
After the event, the ``inputsizes`` dictionary is converted into
|
||||
an appropriate datastructure to be passed to ``cursor.setinputsizes``;
|
||||
either a list for a positional bound parameter execution style,
|
||||
or a dictionary of string parameter keys to DBAPI type objects for
|
||||
a named bound parameter execution style.
|
||||
|
||||
The setinputsizes hook overall is only used for dialects which include
|
||||
the flag ``use_setinputsizes=True``. Dialects which use this
|
||||
include cx_Oracle, pg8000, asyncpg, and pyodbc dialects.
|
||||
|
||||
.. note::
|
||||
|
||||
For use with pyodbc, the ``use_setinputsizes`` flag
|
||||
must be passed to the dialect, e.g.::
|
||||
|
||||
create_engine("mssql+pyodbc://...", use_setinputsizes=True)
|
||||
|
||||
.. seealso::
|
||||
|
||||
:ref:`mssql_pyodbc_setinputsizes`
|
||||
|
||||
.. versionadded:: 1.2.9
|
||||
|
||||
.. seealso::
|
||||
|
||||
:ref:`cx_oracle_setinputsizes`
|
||||
|
||||
"""
|
||||
pass
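    # --- illustrative sketch (not part of the original diff) ----------------
    # Inspecting the inputsizes dictionary described above.  This only fires
    # for dialects that use setinputsizes (e.g. with use_setinputsizes=True);
    # deleting an entry would exclude that parameter from the final
    # setinputsizes call.
    from sqlalchemy import event


    @event.listens_for(engine, "do_setinputsizes")
    def _inspect_inputsizes(inputsizes, cursor, statement, parameters, context):
        for bindparam, dbapitype in list(inputsizes.items()):
            print(bindparam.key, "->", dbapitype)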
|
(file diff suppressed because it is too large)

elitebot/lib/python3.11/site-packages/sqlalchemy/engine/mock.py (new file, 131 lines)
|
@@ -0,0 +1,131 @@
|
|||
# engine/mock.py
|
||||
# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: https://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from operator import attrgetter
|
||||
import typing
|
||||
from typing import Any
|
||||
from typing import Callable
|
||||
from typing import cast
|
||||
from typing import Optional
|
||||
from typing import Type
|
||||
from typing import Union
|
||||
|
||||
from . import url as _url
|
||||
from .. import util
|
||||
|
||||
|
||||
if typing.TYPE_CHECKING:
|
||||
from .base import Engine
|
||||
from .interfaces import _CoreAnyExecuteParams
|
||||
from .interfaces import CoreExecuteOptionsParameter
|
||||
from .interfaces import Dialect
|
||||
from .url import URL
|
||||
from ..sql.base import Executable
|
||||
from ..sql.ddl import SchemaDropper
|
||||
from ..sql.ddl import SchemaGenerator
|
||||
from ..sql.schema import HasSchemaAttr
|
||||
from ..sql.schema import SchemaItem
|
||||
|
||||
|
||||
class MockConnection:
|
||||
def __init__(self, dialect: Dialect, execute: Callable[..., Any]):
|
||||
self._dialect = dialect
|
||||
self._execute_impl = execute
|
||||
|
||||
engine: Engine = cast(Any, property(lambda s: s))
|
||||
dialect: Dialect = cast(Any, property(attrgetter("_dialect")))
|
||||
name: str = cast(Any, property(lambda s: s._dialect.name))
|
||||
|
||||
def connect(self, **kwargs: Any) -> MockConnection:
|
||||
return self
|
||||
|
||||
def schema_for_object(self, obj: HasSchemaAttr) -> Optional[str]:
|
||||
return obj.schema
|
||||
|
||||
def execution_options(self, **kw: Any) -> MockConnection:
|
||||
return self
|
||||
|
||||
def _run_ddl_visitor(
|
||||
self,
|
||||
visitorcallable: Type[Union[SchemaGenerator, SchemaDropper]],
|
||||
element: SchemaItem,
|
||||
**kwargs: Any,
|
||||
) -> None:
|
||||
kwargs["checkfirst"] = False
|
||||
visitorcallable(self.dialect, self, **kwargs).traverse_single(element)
|
||||
|
||||
def execute(
|
||||
self,
|
||||
obj: Executable,
|
||||
parameters: Optional[_CoreAnyExecuteParams] = None,
|
||||
execution_options: Optional[CoreExecuteOptionsParameter] = None,
|
||||
) -> Any:
|
||||
return self._execute_impl(obj, parameters)
|
||||
|
||||
|
||||
def create_mock_engine(
|
||||
url: Union[str, URL], executor: Any, **kw: Any
|
||||
) -> MockConnection:
|
||||
"""Create a "mock" engine used for echoing DDL.
|
||||
|
||||
This is a utility function used for debugging or storing the output of DDL
|
||||
sequences as generated by :meth:`_schema.MetaData.create_all`
|
||||
and related methods.
|
||||
|
||||
The function accepts a URL which is used only to determine the kind of
|
||||
dialect to be used, as well as an "executor" callable function which
|
||||
will receive a SQL expression object and parameters, which can then be
|
||||
echoed or otherwise printed. The executor's return value is not handled,
|
||||
nor does the engine allow regular string statements to be invoked, and
|
||||
is therefore only useful for DDL that is sent to the database without
|
||||
receiving any results.
|
||||
|
||||
E.g.::
|
||||
|
||||
from sqlalchemy import create_mock_engine
|
||||
|
||||
def dump(sql, *multiparams, **params):
|
||||
print(sql.compile(dialect=engine.dialect))
|
||||
|
||||
engine = create_mock_engine('postgresql+psycopg2://', dump)
|
||||
metadata.create_all(engine, checkfirst=False)
|
||||
|
||||
:param url: A string URL which typically needs to contain only the
|
||||
database backend name.
|
||||
|
||||
:param executor: a callable which receives the arguments ``sql``,
|
||||
``*multiparams`` and ``**params``. The ``sql`` parameter is typically
|
||||
an instance of :class:`.ExecutableDDLElement`, which can then be compiled
|
||||
into a string using :meth:`.ExecutableDDLElement.compile`.
|
||||
|
||||
.. versionadded:: 1.4 - the :func:`.create_mock_engine` function replaces
|
||||
the previous "mock" engine strategy used with
|
||||
:func:`_sa.create_engine`.
|
||||
|
||||
.. seealso::
|
||||
|
||||
:ref:`faq_ddl_as_string`
|
||||
|
||||
"""
|
||||
|
||||
# create url.URL object
|
||||
u = _url.make_url(url)
|
||||
|
||||
dialect_cls = u.get_dialect()
|
||||
|
||||
dialect_args = {}
|
||||
# consume dialect arguments from kwargs
|
||||
for k in util.get_cls_kwargs(dialect_cls):
|
||||
if k in kw:
|
||||
dialect_args[k] = kw.pop(k)
|
||||
|
||||
# create dialect
|
||||
dialect = dialect_cls(**dialect_args)
|
||||
|
||||
return MockConnection(dialect, executor)
|
|
@@ -0,0 +1,61 @@
|
|||
# engine/processors.py
|
||||
# Copyright (C) 2010-2024 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
# Copyright (C) 2010 Gaetan de Menten gdementen@gmail.com
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: https://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
"""defines generic type conversion functions, as used in bind and result
|
||||
processors.
|
||||
|
||||
They all share one common characteristic: None is passed through unchanged.
|
||||
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import typing
|
||||
|
||||
from ._py_processors import str_to_datetime_processor_factory # noqa
|
||||
from ..util._has_cy import HAS_CYEXTENSION
|
||||
|
||||
if typing.TYPE_CHECKING or not HAS_CYEXTENSION:
|
||||
from ._py_processors import int_to_boolean as int_to_boolean
|
||||
from ._py_processors import str_to_date as str_to_date
|
||||
from ._py_processors import str_to_datetime as str_to_datetime
|
||||
from ._py_processors import str_to_time as str_to_time
|
||||
from ._py_processors import (
|
||||
to_decimal_processor_factory as to_decimal_processor_factory,
|
||||
)
|
||||
from ._py_processors import to_float as to_float
|
||||
from ._py_processors import to_str as to_str
|
||||
else:
|
||||
from sqlalchemy.cyextension.processors import (
|
||||
DecimalResultProcessor,
|
||||
)
|
||||
from sqlalchemy.cyextension.processors import ( # noqa: F401
|
||||
int_to_boolean as int_to_boolean,
|
||||
)
|
||||
from sqlalchemy.cyextension.processors import ( # noqa: F401,E501
|
||||
str_to_date as str_to_date,
|
||||
)
|
||||
from sqlalchemy.cyextension.processors import ( # noqa: F401
|
||||
str_to_datetime as str_to_datetime,
|
||||
)
|
||||
from sqlalchemy.cyextension.processors import ( # noqa: F401,E501
|
||||
str_to_time as str_to_time,
|
||||
)
|
||||
from sqlalchemy.cyextension.processors import ( # noqa: F401,E501
|
||||
to_float as to_float,
|
||||
)
|
||||
from sqlalchemy.cyextension.processors import ( # noqa: F401,E501
|
||||
to_str as to_str,
|
||||
)
|
||||
|
||||
def to_decimal_processor_factory(target_class, scale):
|
||||
# Note that the scale argument is not taken into account for integer
|
||||
# values in the C implementation while it is in the Python one.
|
||||
# For example, the Python implementation might return
|
||||
# Decimal('5.00000') whereas the C implementation will
|
||||
# return Decimal('5'). These are equivalent of course.
|
||||
return DecimalResultProcessor(target_class, "%%.%df" % scale).process
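# --- illustrative usage sketch (not part of the original module) ------------
# None passes through unchanged, and integer inputs come back as equivalent
# Decimal values whether the C or the Python implementation is in use.
from decimal import Decimal

proc = to_decimal_processor_factory(Decimal, 5)
assert proc(None) is None
assert proc(5) == Decimal("5")  # may render as Decimal('5.00000') in Python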
|
(file diff suppressed because it is too large)

elitebot/lib/python3.11/site-packages/sqlalchemy/engine/result.py (new file, 2382 lines; diff suppressed because it is too large)

elitebot/lib/python3.11/site-packages/sqlalchemy/engine/row.py (new file, 401 lines)
|
@@ -0,0 +1,401 @@
|
|||
# engine/row.py
|
||||
# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: https://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
"""Define row constructs including :class:`.Row`."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from abc import ABC
|
||||
import collections.abc as collections_abc
|
||||
import operator
|
||||
import typing
|
||||
from typing import Any
|
||||
from typing import Callable
|
||||
from typing import Dict
|
||||
from typing import Generic
|
||||
from typing import Iterator
|
||||
from typing import List
|
||||
from typing import Mapping
|
||||
from typing import NoReturn
|
||||
from typing import Optional
|
||||
from typing import overload
|
||||
from typing import Sequence
|
||||
from typing import Tuple
|
||||
from typing import TYPE_CHECKING
|
||||
from typing import TypeVar
|
||||
from typing import Union
|
||||
|
||||
from ..sql import util as sql_util
|
||||
from ..util import deprecated
|
||||
from ..util._has_cy import HAS_CYEXTENSION
|
||||
|
||||
if TYPE_CHECKING or not HAS_CYEXTENSION:
|
||||
from ._py_row import BaseRow as BaseRow
|
||||
else:
|
||||
from sqlalchemy.cyextension.resultproxy import BaseRow as BaseRow
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .result import _KeyType
|
||||
from .result import _ProcessorsType
|
||||
from .result import RMKeyView
|
||||
|
||||
_T = TypeVar("_T", bound=Any)
|
||||
_TP = TypeVar("_TP", bound=Tuple[Any, ...])
|
||||
|
||||
|
||||
class Row(BaseRow, Sequence[Any], Generic[_TP]):
|
||||
"""Represent a single result row.
|
||||
|
||||
The :class:`.Row` object represents a row of a database result. It is
|
||||
typically associated in the 1.x series of SQLAlchemy with the
|
||||
:class:`_engine.CursorResult` object, however is also used by the ORM for
|
||||
tuple-like results as of SQLAlchemy 1.4.
|
||||
|
||||
The :class:`.Row` object seeks to act as much like a Python named
|
||||
tuple as possible. For mapping (i.e. dictionary) behavior on a row,
|
||||
such as testing for containment of keys, refer to the :attr:`.Row._mapping`
|
||||
attribute.
|
||||
|
||||
.. seealso::
|
||||
|
||||
:ref:`tutorial_selecting_data` - includes examples of selecting
|
||||
rows from SELECT statements.
|
||||
|
||||
.. versionchanged:: 1.4
|
||||
|
||||
Renamed ``RowProxy`` to :class:`.Row`. :class:`.Row` is no longer a
|
||||
"proxy" object in that it contains the final form of data within it,
|
||||
and now acts mostly like a named tuple. Mapping-like functionality is
|
||||
moved to the :attr:`.Row._mapping` attribute. See
|
||||
:ref:`change_4710_core` for background on this change.
|
||||
|
||||
"""
|
||||
|
||||
__slots__ = ()
|
||||
|
||||
def __setattr__(self, name: str, value: Any) -> NoReturn:
|
||||
raise AttributeError("can't set attribute")
|
||||
|
||||
def __delattr__(self, name: str) -> NoReturn:
|
||||
raise AttributeError("can't delete attribute")
|
||||
|
||||
def _tuple(self) -> _TP:
|
||||
"""Return a 'tuple' form of this :class:`.Row`.
|
||||
|
||||
At runtime, this method returns "self"; the :class:`.Row` object is
|
||||
already a named tuple. However, at the typing level, if this
|
||||
:class:`.Row` is typed, the "tuple" return type will be a :pep:`484`
|
||||
``Tuple`` datatype that contains typing information about individual
|
||||
elements, supporting typed unpacking and attribute access.
|
||||
|
||||
.. versionadded:: 2.0.19 - The :meth:`.Row._tuple` method supersedes
|
||||
the previous :meth:`.Row.tuple` method, which is now underscored
|
||||
to avoid name conflicts with column names in the same way as other
|
||||
named-tuple methods on :class:`.Row`.
|
||||
|
||||
.. seealso::
|
||||
|
||||
:attr:`.Row._t` - shorthand attribute notation
|
||||
|
||||
:meth:`.Result.tuples`
|
||||
|
||||
|
||||
"""
|
||||
return self # type: ignore
|
||||
|
||||
@deprecated(
|
||||
"2.0.19",
|
||||
"The :meth:`.Row.tuple` method is deprecated in favor of "
|
||||
":meth:`.Row._tuple`; all :class:`.Row` "
|
||||
"methods and library-level attributes are intended to be underscored "
|
||||
"to avoid name conflicts. Please use :meth:`Row._tuple`.",
|
||||
)
|
||||
def tuple(self) -> _TP:
|
||||
"""Return a 'tuple' form of this :class:`.Row`.
|
||||
|
||||
.. versionadded:: 2.0
|
||||
|
||||
"""
|
||||
return self._tuple()
|
||||
|
||||
@property
|
||||
def _t(self) -> _TP:
|
||||
"""A synonym for :meth:`.Row._tuple`.
|
||||
|
||||
.. versionadded:: 2.0.19 - The :attr:`.Row._t` attribute supersedes
|
||||
the previous :attr:`.Row.t` attribute, which is now underscored
|
||||
to avoid name conflicts with column names in the same way as other
|
||||
named-tuple methods on :class:`.Row`.
|
||||
|
||||
.. seealso::
|
||||
|
||||
:attr:`.Result.t`
|
||||
"""
|
||||
return self # type: ignore
|
||||
|
||||
@property
|
||||
@deprecated(
|
||||
"2.0.19",
|
||||
"The :attr:`.Row.t` attribute is deprecated in favor of "
|
||||
":attr:`.Row._t`; all :class:`.Row` "
|
||||
"methods and library-level attributes are intended to be underscored "
|
||||
"to avoid name conflicts. Please use :attr:`Row._t`.",
|
||||
)
|
||||
def t(self) -> _TP:
|
||||
"""A synonym for :meth:`.Row._tuple`.
|
||||
|
||||
.. versionadded:: 2.0
|
||||
|
||||
"""
|
||||
return self._t
|
||||
|
||||
@property
|
||||
def _mapping(self) -> RowMapping:
|
||||
"""Return a :class:`.RowMapping` for this :class:`.Row`.
|
||||
|
||||
This object provides a consistent Python mapping (i.e. dictionary)
|
||||
interface for the data contained within the row. The :class:`.Row`
|
||||
by itself behaves like a named tuple.
|
||||
|
||||
.. seealso::
|
||||
|
||||
:attr:`.Row._fields`
|
||||
|
||||
.. versionadded:: 1.4
|
||||
|
||||
"""
|
||||
return RowMapping(self._parent, None, self._key_to_index, self._data)
|
||||
|
||||
def _filter_on_values(
|
||||
self, processor: Optional[_ProcessorsType]
|
||||
) -> Row[Any]:
|
||||
return Row(self._parent, processor, self._key_to_index, self._data)
|
||||
|
||||
if not TYPE_CHECKING:
|
||||
|
||||
def _special_name_accessor(name: str) -> Any:
|
||||
"""Handle ambiguous names such as "count" and "index" """
|
||||
|
||||
@property
|
||||
def go(self: Row) -> Any:
|
||||
if self._parent._has_key(name):
|
||||
return self.__getattr__(name)
|
||||
else:
|
||||
|
||||
def meth(*arg: Any, **kw: Any) -> Any:
|
||||
return getattr(collections_abc.Sequence, name)(
|
||||
self, *arg, **kw
|
||||
)
|
||||
|
||||
return meth
|
||||
|
||||
return go
|
||||
|
||||
count = _special_name_accessor("count")
|
||||
index = _special_name_accessor("index")
|
||||
|
||||
def __contains__(self, key: Any) -> bool:
|
||||
return key in self._data
|
||||
|
||||
def _op(self, other: Any, op: Callable[[Any, Any], bool]) -> bool:
|
||||
return (
|
||||
op(self._to_tuple_instance(), other._to_tuple_instance())
|
||||
if isinstance(other, Row)
|
||||
else op(self._to_tuple_instance(), other)
|
||||
)
|
||||
|
||||
__hash__ = BaseRow.__hash__
|
||||
|
||||
if TYPE_CHECKING:
|
||||
|
||||
@overload
|
||||
def __getitem__(self, index: int) -> Any: ...
|
||||
|
||||
@overload
|
||||
def __getitem__(self, index: slice) -> Sequence[Any]: ...
|
||||
|
||||
def __getitem__(self, index: Union[int, slice]) -> Any: ...
|
||||
|
||||
def __lt__(self, other: Any) -> bool:
|
||||
return self._op(other, operator.lt)
|
||||
|
||||
def __le__(self, other: Any) -> bool:
|
||||
return self._op(other, operator.le)
|
||||
|
||||
def __ge__(self, other: Any) -> bool:
|
||||
return self._op(other, operator.ge)
|
||||
|
||||
def __gt__(self, other: Any) -> bool:
|
||||
return self._op(other, operator.gt)
|
||||
|
||||
def __eq__(self, other: Any) -> bool:
|
||||
return self._op(other, operator.eq)
|
||||
|
||||
def __ne__(self, other: Any) -> bool:
|
||||
return self._op(other, operator.ne)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return repr(sql_util._repr_row(self))
|
||||
|
||||
@property
|
||||
def _fields(self) -> Tuple[str, ...]:
|
||||
"""Return a tuple of string keys as represented by this
|
||||
:class:`.Row`.
|
||||
|
||||
The keys can represent the labels of the columns returned by a core
|
||||
statement or the names of the orm classes returned by an orm
|
||||
execution.
|
||||
|
||||
This attribute is analogous to the Python named tuple ``._fields``
|
||||
attribute.
|
||||
|
||||
.. versionadded:: 1.4
|
||||
|
||||
.. seealso::
|
||||
|
||||
:attr:`.Row._mapping`
|
||||
|
||||
"""
|
||||
return tuple([k for k in self._parent.keys if k is not None])
|
||||
|
||||
def _asdict(self) -> Dict[str, Any]:
|
||||
"""Return a new dict which maps field names to their corresponding
|
||||
values.
|
||||
|
||||
This method is analogous to the Python named tuple ``._asdict()``
|
||||
method, and works by applying the ``dict()`` constructor to the
|
||||
:attr:`.Row._mapping` attribute.
|
||||
|
||||
.. versionadded:: 1.4
|
||||
|
||||
.. seealso::
|
||||
|
||||
:attr:`.Row._mapping`
|
||||
|
||||
"""
|
||||
return dict(self._mapping)
|
||||
|
||||
|
||||
BaseRowProxy = BaseRow
|
||||
RowProxy = Row
|
||||
|
||||
|
||||
class ROMappingView(ABC):
|
||||
__slots__ = ()
|
||||
|
||||
_items: Sequence[Any]
|
||||
_mapping: Mapping["_KeyType", Any]
|
||||
|
||||
def __init__(
|
||||
self, mapping: Mapping["_KeyType", Any], items: Sequence[Any]
|
||||
):
|
||||
self._mapping = mapping # type: ignore[misc]
|
||||
self._items = items # type: ignore[misc]
|
||||
|
||||
def __len__(self) -> int:
|
||||
return len(self._items)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return "{0.__class__.__name__}({0._mapping!r})".format(self)
|
||||
|
||||
def __iter__(self) -> Iterator[Any]:
|
||||
return iter(self._items)
|
||||
|
||||
def __contains__(self, item: Any) -> bool:
|
||||
return item in self._items
|
||||
|
||||
def __eq__(self, other: Any) -> bool:
|
||||
return list(other) == list(self)
|
||||
|
||||
def __ne__(self, other: Any) -> bool:
|
||||
return list(other) != list(self)
|
||||
|
||||
|
||||
class ROMappingKeysValuesView(
|
||||
ROMappingView, typing.KeysView["_KeyType"], typing.ValuesView[Any]
|
||||
):
|
||||
__slots__ = ("_items",) # mapping slot is provided by KeysView
|
||||
|
||||
|
||||
class ROMappingItemsView(ROMappingView, typing.ItemsView["_KeyType", Any]):
|
||||
__slots__ = ("_items",) # mapping slot is provided by ItemsView
|
||||
|
||||
|
||||
class RowMapping(BaseRow, typing.Mapping["_KeyType", Any]):
|
||||
"""A ``Mapping`` that maps column names and objects to :class:`.Row`
|
||||
values.
|
||||
|
||||
The :class:`.RowMapping` is available from a :class:`.Row` via the
|
||||
:attr:`.Row._mapping` attribute, as well as from the iterable interface
|
||||
provided by the :class:`.MappingResult` object returned by the
|
||||
:meth:`_engine.Result.mappings` method.
|
||||
|
||||
:class:`.RowMapping` supplies Python mapping (i.e. dictionary) access to
|
||||
the contents of the row. This includes support for testing of
|
||||
containment of specific keys (string column names or objects), as well
|
||||
as iteration of keys, values, and items::
|
||||
|
||||
for row in result:
|
||||
if 'a' in row._mapping:
|
||||
print("Column 'a': %s" % row._mapping['a'])
|
||||
|
||||
print("Column b: %s" % row._mapping[table.c.b])
|
||||
|
||||
|
||||
.. versionadded:: 1.4 The :class:`.RowMapping` object replaces the
|
||||
mapping-like access previously provided by a database result row,
|
||||
which now seeks to behave mostly like a named tuple.
|
||||
|
||||
"""
|
||||
|
||||
__slots__ = ()
|
||||
|
||||
if TYPE_CHECKING:
|
||||
|
||||
def __getitem__(self, key: _KeyType) -> Any: ...
|
||||
|
||||
else:
|
||||
__getitem__ = BaseRow._get_by_key_impl_mapping
|
||||
|
||||
def _values_impl(self) -> List[Any]:
|
||||
return list(self._data)
|
||||
|
||||
def __iter__(self) -> Iterator[str]:
|
||||
return (k for k in self._parent.keys if k is not None)
|
||||
|
||||
def __len__(self) -> int:
|
||||
return len(self._data)
|
||||
|
||||
def __contains__(self, key: object) -> bool:
|
||||
return self._parent._has_key(key)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return repr(dict(self))
|
||||
|
||||
def items(self) -> ROMappingItemsView:
|
||||
"""Return a view of key/value tuples for the elements in the
|
||||
underlying :class:`.Row`.
|
||||
|
||||
"""
|
||||
return ROMappingItemsView(
|
||||
self, [(key, self[key]) for key in self.keys()]
|
||||
)
|
||||
|
||||
def keys(self) -> RMKeyView:
|
||||
"""Return a view of 'keys' for string column names represented
|
||||
by the underlying :class:`.Row`.
|
||||
|
||||
"""
|
||||
|
||||
return self._parent.keys
|
||||
|
||||
def values(self) -> ROMappingKeysValuesView:
|
||||
"""Return a view of values for the values represented in the
|
||||
underlying :class:`.Row`.
|
||||
|
||||
"""
|
||||
return ROMappingKeysValuesView(self, self._values_impl())
|
|
@@ -0,0 +1,19 @@
|
|||
# engine/strategies.py
|
||||
# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: https://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
"""Deprecated mock engine strategy used by Alembic.
|
||||
|
||||
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from .mock import MockConnection # noqa
|
||||
|
||||
|
||||
class MockEngineStrategy:
|
||||
MockConnection = MockConnection
|
elitebot/lib/python3.11/site-packages/sqlalchemy/engine/url.py (new file, 910 lines)
|
@@ -0,0 +1,910 @@
|
|||
# engine/url.py
|
||||
# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: https://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
"""Provides the :class:`~sqlalchemy.engine.url.URL` class which encapsulates
|
||||
information about a database connection specification.
|
||||
|
||||
The URL object is created automatically when
|
||||
:func:`~sqlalchemy.engine.create_engine` is called with a string
|
||||
argument; alternatively, the URL is a public-facing construct which can
|
||||
be used directly and is also accepted directly by ``create_engine()``.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import collections.abc as collections_abc
|
||||
import re
|
||||
from typing import Any
|
||||
from typing import cast
|
||||
from typing import Dict
|
||||
from typing import Iterable
|
||||
from typing import List
|
||||
from typing import Mapping
|
||||
from typing import NamedTuple
|
||||
from typing import Optional
|
||||
from typing import overload
|
||||
from typing import Sequence
|
||||
from typing import Tuple
|
||||
from typing import Type
|
||||
from typing import Union
|
||||
from urllib.parse import parse_qsl
|
||||
from urllib.parse import quote
|
||||
from urllib.parse import quote_plus
|
||||
from urllib.parse import unquote
|
||||
|
||||
from .interfaces import Dialect
|
||||
from .. import exc
|
||||
from .. import util
|
||||
from ..dialects import plugins
|
||||
from ..dialects import registry
|
||||
|
||||
|
||||
class URL(NamedTuple):
|
||||
"""
|
||||
Represent the components of a URL used to connect to a database.
|
||||
|
||||
URLs are typically constructed from a fully formatted URL string, where the
|
||||
:func:`.make_url` function is used internally by the
|
||||
:func:`_sa.create_engine` function in order to parse the URL string into
|
||||
its individual components, which are then used to construct a new
|
||||
:class:`.URL` object. When parsing from a formatted URL string, the parsing
|
||||
format generally follows
|
||||
`RFC-1738 <https://www.ietf.org/rfc/rfc1738.txt>`_, with some exceptions.
|
||||
|
||||
A :class:`_engine.URL` object may also be produced directly, either by
|
||||
using the :func:`.make_url` function with a fully formed URL string, or
|
||||
by using the :meth:`_engine.URL.create` constructor in order
|
||||
to construct a :class:`_engine.URL` programmatically given individual
|
||||
fields. The resulting :class:`.URL` object may be passed directly to
|
||||
:func:`_sa.create_engine` in place of a string argument, which will bypass
|
||||
the usage of :func:`.make_url` within the engine's creation process.
|
||||
|
||||
.. versionchanged:: 1.4
|
||||
|
||||
The :class:`_engine.URL` object is now an immutable object. To
|
||||
create a URL, use the :func:`_engine.make_url` or
|
||||
:meth:`_engine.URL.create` function / method. To modify
|
||||
a :class:`_engine.URL`, use methods like
|
||||
:meth:`_engine.URL.set` and
|
||||
:meth:`_engine.URL.update_query_dict` to return a new
|
||||
:class:`_engine.URL` object with modifications. See notes for this
|
||||
change at :ref:`change_5526`.
|
||||
|
||||
.. seealso::
|
||||
|
||||
:ref:`database_urls`
|
||||
|
||||
:class:`_engine.URL` contains the following attributes:
|
||||
|
||||
* :attr:`_engine.URL.drivername`: database backend and driver name, such as
|
||||
``postgresql+psycopg2``
|
||||
* :attr:`_engine.URL.username`: username string
|
||||
* :attr:`_engine.URL.password`: password string
|
||||
* :attr:`_engine.URL.host`: string hostname
|
||||
* :attr:`_engine.URL.port`: integer port number
|
||||
* :attr:`_engine.URL.database`: string database name
|
||||
* :attr:`_engine.URL.query`: an immutable mapping representing the query
|
||||
      string. Contains strings for keys and either strings or tuples of
|
||||
strings for values.
|
||||
|
||||
|
||||
"""
|
||||
|
||||
drivername: str
|
||||
"""database backend and driver name, such as
|
||||
``postgresql+psycopg2``
|
||||
|
||||
"""
|
||||
|
||||
username: Optional[str]
|
||||
"username string"
|
||||
|
||||
password: Optional[str]
|
||||
"""password, which is normally a string but may also be any
|
||||
object that has a ``__str__()`` method."""
|
||||
|
||||
host: Optional[str]
|
||||
"""hostname or IP number. May also be a data source name for some
|
||||
drivers."""
|
||||
|
||||
port: Optional[int]
|
||||
"""integer port number"""
|
||||
|
||||
database: Optional[str]
|
||||
"""database name"""
|
||||
|
||||
query: util.immutabledict[str, Union[Tuple[str, ...], str]]
|
||||
"""an immutable mapping representing the query string. contains strings
|
||||
for keys and either strings or tuples of strings for values, e.g.::
|
||||
|
||||
>>> from sqlalchemy.engine import make_url
|
||||
>>> url = make_url("postgresql+psycopg2://user:pass@host/dbname?alt_host=host1&alt_host=host2&ssl_cipher=%2Fpath%2Fto%2Fcrt")
|
||||
>>> url.query
|
||||
immutabledict({'alt_host': ('host1', 'host2'), 'ssl_cipher': '/path/to/crt'})
|
||||
|
||||
To create a mutable copy of this mapping, use the ``dict`` constructor::
|
||||
|
||||
mutable_query_opts = dict(url.query)
|
||||
|
||||
.. seealso::
|
||||
|
||||
:attr:`_engine.URL.normalized_query` - normalizes all values into sequences
|
||||
for consistent processing
|
||||
|
||||
Methods for altering the contents of :attr:`_engine.URL.query`:
|
||||
|
||||
:meth:`_engine.URL.update_query_dict`
|
||||
|
||||
:meth:`_engine.URL.update_query_string`
|
||||
|
||||
:meth:`_engine.URL.update_query_pairs`
|
||||
|
||||
:meth:`_engine.URL.difference_update_query`
|
||||
|
||||
""" # noqa: E501
|
||||
|
||||
@classmethod
|
||||
def create(
|
||||
cls,
|
||||
drivername: str,
|
||||
username: Optional[str] = None,
|
||||
password: Optional[str] = None,
|
||||
host: Optional[str] = None,
|
||||
port: Optional[int] = None,
|
||||
database: Optional[str] = None,
|
||||
query: Mapping[str, Union[Sequence[str], str]] = util.EMPTY_DICT,
|
||||
) -> URL:
|
||||
"""Create a new :class:`_engine.URL` object.
|
||||
|
||||
.. seealso::
|
||||
|
||||
:ref:`database_urls`
|
||||
|
||||
:param drivername: the name of the database backend. This name will
|
||||
correspond to a module in sqlalchemy/databases or a third party
|
||||
plug-in.
|
||||
:param username: The user name.
|
||||
:param password: database password. Is typically a string, but may
|
||||
also be an object that can be stringified with ``str()``.
|
||||
|
||||
.. note:: The password string should **not** be URL encoded when
|
||||
passed as an argument to :meth:`_engine.URL.create`; the string
|
||||
should contain the password characters exactly as they would be
|
||||
typed.
|
||||
|
||||
.. note:: A password-producing object will be stringified only
|
||||
**once** per :class:`_engine.Engine` object. For dynamic password
|
||||
generation per connect, see :ref:`engines_dynamic_tokens`.
|
||||
|
||||
:param host: The name of the host.
|
||||
:param port: The port number.
|
||||
:param database: The database name.
|
||||
:param query: A dictionary of string keys to string values to be passed
|
||||
to the dialect and/or the DBAPI upon connect. To specify non-string
|
||||
parameters to a Python DBAPI directly, use the
|
||||
:paramref:`_sa.create_engine.connect_args` parameter to
|
||||
:func:`_sa.create_engine`. See also
|
||||
:attr:`_engine.URL.normalized_query` for a dictionary that is
|
||||
consistently string->list of string.
|
||||
:return: new :class:`_engine.URL` object.
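
        E.g., a minimal sketch using placeholder connection values::

            >>> from sqlalchemy.engine import URL
            >>> url = URL.create(
            ...     drivername="postgresql+psycopg2",
            ...     username="user",
            ...     host="localhost",
            ...     database="mydb",
            ...     query={"application_name": "myapp"},
            ... )
            >>> url.render_as_string()
            'postgresql+psycopg2://user@localhost/mydb?application_name=myapp'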
|
||||
|
||||
.. versionadded:: 1.4
|
||||
|
||||
The :class:`_engine.URL` object is now an **immutable named
|
||||
tuple**. In addition, the ``query`` dictionary is also immutable.
|
||||
To create a URL, use the :func:`_engine.url.make_url` or
|
||||
            :meth:`_engine.URL.create` function / method. To modify a
|
||||
:class:`_engine.URL`, use the :meth:`_engine.URL.set` and
|
||||
:meth:`_engine.URL.update_query` methods.
|
||||
|
||||
"""
|
||||
|
||||
return cls(
|
||||
cls._assert_str(drivername, "drivername"),
|
||||
cls._assert_none_str(username, "username"),
|
||||
password,
|
||||
cls._assert_none_str(host, "host"),
|
||||
cls._assert_port(port),
|
||||
cls._assert_none_str(database, "database"),
|
||||
cls._str_dict(query),
|
||||
)
|
||||
|
||||
@classmethod
|
||||
def _assert_port(cls, port: Optional[int]) -> Optional[int]:
|
||||
if port is None:
|
||||
return None
|
||||
try:
|
||||
return int(port)
|
||||
except TypeError:
|
||||
raise TypeError("Port argument must be an integer or None")
|
||||
|
||||
@classmethod
|
||||
def _assert_str(cls, v: str, paramname: str) -> str:
|
||||
if not isinstance(v, str):
|
||||
raise TypeError("%s must be a string" % paramname)
|
||||
return v
|
||||
|
||||
@classmethod
|
||||
def _assert_none_str(
|
||||
cls, v: Optional[str], paramname: str
|
||||
) -> Optional[str]:
|
||||
if v is None:
|
||||
return v
|
||||
|
||||
return cls._assert_str(v, paramname)
|
||||
|
||||
@classmethod
|
||||
def _str_dict(
|
||||
cls,
|
||||
dict_: Optional[
|
||||
Union[
|
||||
Sequence[Tuple[str, Union[Sequence[str], str]]],
|
||||
Mapping[str, Union[Sequence[str], str]],
|
||||
]
|
||||
],
|
||||
) -> util.immutabledict[str, Union[Tuple[str, ...], str]]:
|
||||
if dict_ is None:
|
||||
return util.EMPTY_DICT
|
||||
|
||||
@overload
|
||||
def _assert_value(
|
||||
val: str,
|
||||
) -> str: ...
|
||||
|
||||
@overload
|
||||
def _assert_value(
|
||||
val: Sequence[str],
|
||||
) -> Union[str, Tuple[str, ...]]: ...
|
||||
|
||||
def _assert_value(
|
||||
val: Union[str, Sequence[str]],
|
||||
) -> Union[str, Tuple[str, ...]]:
|
||||
if isinstance(val, str):
|
||||
return val
|
||||
elif isinstance(val, collections_abc.Sequence):
|
||||
return tuple(_assert_value(elem) for elem in val)
|
||||
else:
|
||||
raise TypeError(
|
||||
"Query dictionary values must be strings or "
|
||||
"sequences of strings"
|
||||
)
|
||||
|
||||
def _assert_str(v: str) -> str:
|
||||
if not isinstance(v, str):
|
||||
raise TypeError("Query dictionary keys must be strings")
|
||||
return v
|
||||
|
||||
dict_items: Iterable[Tuple[str, Union[Sequence[str], str]]]
|
||||
if isinstance(dict_, collections_abc.Sequence):
|
||||
dict_items = dict_
|
||||
else:
|
||||
dict_items = dict_.items()
|
||||
|
||||
return util.immutabledict(
|
||||
{
|
||||
_assert_str(key): _assert_value(
|
||||
value,
|
||||
)
|
||||
for key, value in dict_items
|
||||
}
|
||||
)
|
||||
|
||||
def set(
|
||||
self,
|
||||
drivername: Optional[str] = None,
|
||||
username: Optional[str] = None,
|
||||
password: Optional[str] = None,
|
||||
host: Optional[str] = None,
|
||||
port: Optional[int] = None,
|
||||
database: Optional[str] = None,
|
||||
query: Optional[Mapping[str, Union[Sequence[str], str]]] = None,
|
||||
) -> URL:
|
||||
"""return a new :class:`_engine.URL` object with modifications.
|
||||
|
||||
Values are used if they are non-None. To set a value to ``None``
|
||||
explicitly, use the :meth:`_engine.URL._replace` method adapted
|
||||
from ``namedtuple``.
|
||||
|
||||
:param drivername: new drivername
|
||||
:param username: new username
|
||||
:param password: new password
|
||||
:param host: new hostname
|
||||
        :param port: new port
        :param database: new database name
|
||||
:param query: new query parameters, passed a dict of string keys
|
||||
referring to string or sequence of string values. Fully
|
||||
replaces the previous list of arguments.
|
||||
|
||||
:return: new :class:`_engine.URL` object.
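
        E.g., a minimal sketch using placeholder values::

            >>> from sqlalchemy.engine import make_url
            >>> url = make_url("postgresql+psycopg2://user@host/dbname")
            >>> url.set(host="other_host", database="other_db").render_as_string()
            'postgresql+psycopg2://user@other_host/other_db'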
|
||||
|
||||
.. versionadded:: 1.4
|
||||
|
||||
.. seealso::
|
||||
|
||||
:meth:`_engine.URL.update_query_dict`
|
||||
|
||||
"""
|
||||
|
||||
kw: Dict[str, Any] = {}
|
||||
if drivername is not None:
|
||||
kw["drivername"] = drivername
|
||||
if username is not None:
|
||||
kw["username"] = username
|
||||
if password is not None:
|
||||
kw["password"] = password
|
||||
if host is not None:
|
||||
kw["host"] = host
|
||||
if port is not None:
|
||||
kw["port"] = port
|
||||
if database is not None:
|
||||
kw["database"] = database
|
||||
if query is not None:
|
||||
kw["query"] = query
|
||||
|
||||
return self._assert_replace(**kw)
|
||||
|
||||
def _assert_replace(self, **kw: Any) -> URL:
|
||||
"""argument checks before calling _replace()"""
|
||||
|
||||
if "drivername" in kw:
|
||||
self._assert_str(kw["drivername"], "drivername")
|
||||
for name in "username", "host", "database":
|
||||
if name in kw:
|
||||
self._assert_none_str(kw[name], name)
|
||||
if "port" in kw:
|
||||
self._assert_port(kw["port"])
|
||||
if "query" in kw:
|
||||
kw["query"] = self._str_dict(kw["query"])
|
||||
|
||||
return self._replace(**kw)
|
||||
|
||||
def update_query_string(
|
||||
self, query_string: str, append: bool = False
|
||||
) -> URL:
|
||||
"""Return a new :class:`_engine.URL` object with the :attr:`_engine.URL.query`
|
||||
parameter dictionary updated by the given query string.
|
||||
|
||||
E.g.::
|
||||
|
||||
>>> from sqlalchemy.engine import make_url
|
||||
>>> url = make_url("postgresql+psycopg2://user:pass@host/dbname")
|
||||
>>> url = url.update_query_string("alt_host=host1&alt_host=host2&ssl_cipher=%2Fpath%2Fto%2Fcrt")
|
||||
>>> str(url)
|
||||
'postgresql+psycopg2://user:pass@host/dbname?alt_host=host1&alt_host=host2&ssl_cipher=%2Fpath%2Fto%2Fcrt'
|
||||
|
||||
:param query_string: a URL escaped query string, not including the
|
||||
question mark.
|
||||
|
||||
:param append: if True, parameters in the existing query string will
|
||||
not be removed; new parameters will be in addition to those present.
|
||||
If left at its default of False, keys present in the given query
|
||||
parameters will replace those of the existing query string.
|
||||
|
||||
.. versionadded:: 1.4
|
||||
|
||||
.. seealso::
|
||||
|
||||
:attr:`_engine.URL.query`
|
||||
|
||||
:meth:`_engine.URL.update_query_dict`
|
||||
|
||||
""" # noqa: E501
|
||||
return self.update_query_pairs(parse_qsl(query_string), append=append)
|
||||
|
||||
def update_query_pairs(
|
||||
self,
|
||||
key_value_pairs: Iterable[Tuple[str, Union[str, List[str]]]],
|
||||
append: bool = False,
|
||||
) -> URL:
|
||||
"""Return a new :class:`_engine.URL` object with the
|
||||
:attr:`_engine.URL.query`
|
||||
parameter dictionary updated by the given sequence of key/value pairs
|
||||
|
||||
E.g.::
|
||||
|
||||
>>> from sqlalchemy.engine import make_url
|
||||
>>> url = make_url("postgresql+psycopg2://user:pass@host/dbname")
|
||||
>>> url = url.update_query_pairs([("alt_host", "host1"), ("alt_host", "host2"), ("ssl_cipher", "/path/to/crt")])
|
||||
>>> str(url)
|
||||
'postgresql+psycopg2://user:pass@host/dbname?alt_host=host1&alt_host=host2&ssl_cipher=%2Fpath%2Fto%2Fcrt'
|
||||
|
||||
:param key_value_pairs: A sequence of tuples containing two strings
|
||||
each.
|
||||
|
||||
:param append: if True, parameters in the existing query string will
|
||||
not be removed; new parameters will be in addition to those present.
|
||||
If left at its default of False, keys present in the given query
|
||||
parameters will replace those of the existing query string.
|
||||
|
||||
.. versionadded:: 1.4
|
||||
|
||||
.. seealso::
|
||||
|
||||
:attr:`_engine.URL.query`
|
||||
|
||||
:meth:`_engine.URL.difference_update_query`
|
||||
|
||||
:meth:`_engine.URL.set`
|
||||
|
||||
""" # noqa: E501
|
||||
|
||||
existing_query = self.query
|
||||
new_keys: Dict[str, Union[str, List[str]]] = {}
|
||||
|
||||
for key, value in key_value_pairs:
|
||||
if key in new_keys:
|
||||
new_keys[key] = util.to_list(new_keys[key])
|
||||
cast("List[str]", new_keys[key]).append(cast(str, value))
|
||||
else:
|
||||
new_keys[key] = (
|
||||
list(value) if isinstance(value, (list, tuple)) else value
|
||||
)
|
||||
|
||||
new_query: Mapping[str, Union[str, Sequence[str]]]
|
||||
if append:
|
||||
new_query = {}
|
||||
|
||||
for k in new_keys:
|
||||
if k in existing_query:
|
||||
new_query[k] = tuple(
|
||||
util.to_list(existing_query[k])
|
||||
+ util.to_list(new_keys[k])
|
||||
)
|
||||
else:
|
||||
new_query[k] = new_keys[k]
|
||||
|
||||
new_query.update(
|
||||
{
|
||||
k: existing_query[k]
|
||||
for k in set(existing_query).difference(new_keys)
|
||||
}
|
||||
)
|
||||
else:
|
||||
new_query = self.query.union(
|
||||
{
|
||||
k: tuple(v) if isinstance(v, list) else v
|
||||
for k, v in new_keys.items()
|
||||
}
|
||||
)
|
||||
return self.set(query=new_query)
|
||||
|
||||
def update_query_dict(
|
||||
self,
|
||||
query_parameters: Mapping[str, Union[str, List[str]]],
|
||||
append: bool = False,
|
||||
) -> URL:
|
||||
"""Return a new :class:`_engine.URL` object with the
|
||||
:attr:`_engine.URL.query` parameter dictionary updated by the given
|
||||
dictionary.
|
||||
|
||||
The dictionary typically contains string keys and string values.
|
||||
In order to represent a query parameter that is expressed multiple
|
||||
times, pass a sequence of string values.
|
||||
|
||||
E.g.::
|
||||
|
||||
|
||||
>>> from sqlalchemy.engine import make_url
|
||||
>>> url = make_url("postgresql+psycopg2://user:pass@host/dbname")
|
||||
>>> url = url.update_query_dict({"alt_host": ["host1", "host2"], "ssl_cipher": "/path/to/crt"})
|
||||
>>> str(url)
|
||||
'postgresql+psycopg2://user:pass@host/dbname?alt_host=host1&alt_host=host2&ssl_cipher=%2Fpath%2Fto%2Fcrt'
|
||||
|
||||
|
||||
:param query_parameters: A dictionary with string keys and values
|
||||
that are either strings, or sequences of strings.
|
||||
|
||||
:param append: if True, parameters in the existing query string will
|
||||
not be removed; new parameters will be in addition to those present.
|
||||
If left at its default of False, keys present in the given query
|
||||
parameters will replace those of the existing query string.
|
||||
|
||||
|
||||
.. versionadded:: 1.4
|
||||
|
||||
.. seealso::
|
||||
|
||||
:attr:`_engine.URL.query`
|
||||
|
||||
:meth:`_engine.URL.update_query_string`
|
||||
|
||||
:meth:`_engine.URL.update_query_pairs`
|
||||
|
||||
:meth:`_engine.URL.difference_update_query`
|
||||
|
||||
:meth:`_engine.URL.set`
|
||||
|
||||
""" # noqa: E501
|
||||
return self.update_query_pairs(query_parameters.items(), append=append)
|
||||
|
||||
def difference_update_query(self, names: Iterable[str]) -> URL:
|
||||
"""
|
||||
Remove the given names from the :attr:`_engine.URL.query` dictionary,
|
||||
returning the new :class:`_engine.URL`.
|
||||
|
||||
E.g.::
|
||||
|
||||
url = url.difference_update_query(['foo', 'bar'])
|
||||
|
||||
Equivalent to using :meth:`_engine.URL.set` as follows::
|
||||
|
||||
url = url.set(
|
||||
query={
|
||||
key: url.query[key]
|
||||
for key in set(url.query).difference(['foo', 'bar'])
|
||||
}
|
||||
)
|
||||
|
||||
.. versionadded:: 1.4
|
||||
|
||||
.. seealso::
|
||||
|
||||
:attr:`_engine.URL.query`
|
||||
|
||||
:meth:`_engine.URL.update_query_dict`
|
||||
|
||||
:meth:`_engine.URL.set`
|
||||
|
||||
"""
|
||||
|
||||
if not set(names).intersection(self.query):
|
||||
return self
|
||||
|
||||
return URL(
|
||||
self.drivername,
|
||||
self.username,
|
||||
self.password,
|
||||
self.host,
|
||||
self.port,
|
||||
self.database,
|
||||
util.immutabledict(
|
||||
{
|
||||
key: self.query[key]
|
||||
for key in set(self.query).difference(names)
|
||||
}
|
||||
),
|
||||
)
|
||||
|
||||
@property
|
||||
def normalized_query(self) -> Mapping[str, Sequence[str]]:
|
||||
"""Return the :attr:`_engine.URL.query` dictionary with values normalized
|
||||
into sequences.
|
||||
|
||||
As the :attr:`_engine.URL.query` dictionary may contain either
|
||||
string values or sequences of string values to differentiate between
|
||||
parameters that are specified multiple times in the query string,
|
||||
code that needs to handle multiple parameters generically will wish
|
||||
to use this attribute so that all parameters present are presented
|
||||
as sequences. Inspiration is from Python's ``urllib.parse.parse_qs``
|
||||
function. E.g.::
|
||||
|
||||
|
||||
>>> from sqlalchemy.engine import make_url
|
||||
>>> url = make_url("postgresql+psycopg2://user:pass@host/dbname?alt_host=host1&alt_host=host2&ssl_cipher=%2Fpath%2Fto%2Fcrt")
|
||||
>>> url.query
|
||||
immutabledict({'alt_host': ('host1', 'host2'), 'ssl_cipher': '/path/to/crt'})
|
||||
>>> url.normalized_query
|
||||
immutabledict({'alt_host': ('host1', 'host2'), 'ssl_cipher': ('/path/to/crt',)})
|
||||
|
||||
""" # noqa: E501
|
||||
|
||||
return util.immutabledict(
|
||||
{
|
||||
k: (v,) if not isinstance(v, tuple) else v
|
||||
for k, v in self.query.items()
|
||||
}
|
||||
)
|
||||
|
||||
@util.deprecated(
|
||||
"1.4",
|
||||
"The :meth:`_engine.URL.__to_string__ method is deprecated and will "
|
||||
"be removed in a future release. Please use the "
|
||||
":meth:`_engine.URL.render_as_string` method.",
|
||||
)
|
||||
def __to_string__(self, hide_password: bool = True) -> str:
|
||||
"""Render this :class:`_engine.URL` object as a string.
|
||||
|
||||
:param hide_password: Defaults to True. The password is not shown
|
||||
in the string unless this is set to False.
|
||||
|
||||
"""
|
||||
return self.render_as_string(hide_password=hide_password)
|
||||
|
||||
def render_as_string(self, hide_password: bool = True) -> str:
|
||||
"""Render this :class:`_engine.URL` object as a string.
|
||||
|
||||
This method is used when the ``__str__()`` or ``__repr__()``
|
||||
        methods are used. Unlike those methods, it accepts the
        ``hide_password`` option directly.
|
||||
|
||||
:param hide_password: Defaults to True. The password is not shown
|
||||
in the string unless this is set to False.
|
||||
|
||||
"""
|
||||
s = self.drivername + "://"
|
||||
if self.username is not None:
|
||||
s += quote(self.username, safe=" +")
|
||||
if self.password is not None:
|
||||
s += ":" + (
|
||||
"***"
|
||||
if hide_password
|
||||
else quote(str(self.password), safe=" +")
|
||||
)
|
||||
s += "@"
|
||||
if self.host is not None:
|
||||
if ":" in self.host:
|
||||
s += f"[{self.host}]"
|
||||
else:
|
||||
s += self.host
|
||||
if self.port is not None:
|
||||
s += ":" + str(self.port)
|
||||
if self.database is not None:
|
||||
s += "/" + self.database
|
||||
if self.query:
|
||||
keys = list(self.query)
|
||||
keys.sort()
|
||||
s += "?" + "&".join(
|
||||
f"{quote_plus(k)}={quote_plus(element)}"
|
||||
for k in keys
|
||||
for element in util.to_list(self.query[k])
|
||||
)
|
||||
return s
|
||||
|
||||
def __repr__(self) -> str:
|
||||
return self.render_as_string()
|
||||
|
||||
def __copy__(self) -> URL:
|
||||
return self.__class__.create(
|
||||
self.drivername,
|
||||
self.username,
|
||||
self.password,
|
||||
self.host,
|
||||
self.port,
|
||||
self.database,
|
||||
# note this is an immutabledict of str-> str / tuple of str,
|
||||
# also fully immutable. does not require deepcopy
|
||||
self.query,
|
||||
)
|
||||
|
||||
def __deepcopy__(self, memo: Any) -> URL:
|
||||
return self.__copy__()
|
||||
|
||||
def __hash__(self) -> int:
|
||||
return hash(str(self))
|
||||
|
||||
def __eq__(self, other: Any) -> bool:
|
||||
return (
|
||||
isinstance(other, URL)
|
||||
and self.drivername == other.drivername
|
||||
and self.username == other.username
|
||||
and self.password == other.password
|
||||
and self.host == other.host
|
||||
and self.database == other.database
|
||||
and self.query == other.query
|
||||
and self.port == other.port
|
||||
)
|
||||
|
||||
def __ne__(self, other: Any) -> bool:
|
||||
return not self == other
|
||||
|
||||
def get_backend_name(self) -> str:
|
||||
"""Return the backend name.
|
||||
|
||||
This is the name that corresponds to the database backend in
|
||||
use, and is the portion of the :attr:`_engine.URL.drivername`
|
||||
that is to the left of the plus sign.
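
        E.g., with a placeholder URL::

            >>> from sqlalchemy.engine import make_url
            >>> url = make_url("postgresql+psycopg2://user@host/dbname")
            >>> url.get_backend_name()
            'postgresql'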
|
||||
|
||||
"""
|
||||
if "+" not in self.drivername:
|
||||
return self.drivername
|
||||
else:
|
||||
return self.drivername.split("+")[0]
|
||||
|
||||
def get_driver_name(self) -> str:
|
||||
"""Return the backend name.
|
||||
|
||||
This is the name that corresponds to the DBAPI driver in
|
||||
use, and is the portion of the :attr:`_engine.URL.drivername`
|
||||
that is to the right of the plus sign.
|
||||
|
||||
If the :attr:`_engine.URL.drivername` does not include a plus sign,
|
||||
then the default :class:`_engine.Dialect` for this :class:`_engine.URL`
|
||||
is imported in order to get the driver name.
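
        E.g., with a placeholder URL::

            >>> from sqlalchemy.engine import make_url
            >>> url = make_url("postgresql+psycopg2://user@host/dbname")
            >>> url.get_driver_name()
            'psycopg2'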
|
||||
|
||||
"""
|
||||
|
||||
if "+" not in self.drivername:
|
||||
return self.get_dialect().driver
|
||||
else:
|
||||
return self.drivername.split("+")[1]
|
||||
|
||||
def _instantiate_plugins(
|
||||
self, kwargs: Mapping[str, Any]
|
||||
) -> Tuple[URL, List[Any], Dict[str, Any]]:
|
||||
plugin_names = util.to_list(self.query.get("plugin", ()))
|
||||
plugin_names += kwargs.get("plugins", [])
|
||||
|
||||
kwargs = dict(kwargs)
|
||||
|
||||
loaded_plugins = [
|
||||
plugins.load(plugin_name)(self, kwargs)
|
||||
for plugin_name in plugin_names
|
||||
]
|
||||
|
||||
u = self.difference_update_query(["plugin", "plugins"])
|
||||
|
||||
for plugin in loaded_plugins:
|
||||
new_u = plugin.update_url(u)
|
||||
if new_u is not None:
|
||||
u = new_u
|
||||
|
||||
kwargs.pop("plugins", None)
|
||||
|
||||
return u, loaded_plugins, kwargs
|
||||
|
||||
def _get_entrypoint(self) -> Type[Dialect]:
|
||||
"""Return the "entry point" dialect class.
|
||||
|
||||
This is normally the dialect itself except in the case when the
|
||||
returned class implements the get_dialect_cls() method.
|
||||
|
||||
"""
|
||||
if "+" not in self.drivername:
|
||||
name = self.drivername
|
||||
else:
|
||||
name = self.drivername.replace("+", ".")
|
||||
cls = registry.load(name)
|
||||
# check for legacy dialects that
|
||||
# would return a module with 'dialect' as the
|
||||
# actual class
|
||||
if (
|
||||
hasattr(cls, "dialect")
|
||||
and isinstance(cls.dialect, type)
|
||||
and issubclass(cls.dialect, Dialect)
|
||||
):
|
||||
return cls.dialect
|
||||
else:
|
||||
return cast("Type[Dialect]", cls)
|
||||
|
||||
def get_dialect(self, _is_async: bool = False) -> Type[Dialect]:
|
||||
"""Return the SQLAlchemy :class:`_engine.Dialect` class corresponding
|
||||
to this URL's driver name.
|
||||
|
||||
"""
|
||||
entrypoint = self._get_entrypoint()
|
||||
if _is_async:
|
||||
dialect_cls = entrypoint.get_async_dialect_cls(self)
|
||||
else:
|
||||
dialect_cls = entrypoint.get_dialect_cls(self)
|
||||
return dialect_cls
|
||||
|
||||
def translate_connect_args(
|
||||
self, names: Optional[List[str]] = None, **kw: Any
|
||||
) -> Dict[str, Any]:
|
||||
r"""Translate url attributes into a dictionary of connection arguments.
|
||||
|
||||
Returns attributes of this url (`host`, `database`, `username`,
|
||||
`password`, `port`) as a plain dictionary. The attribute names are
|
||||
used as the keys by default. Unset or false attributes are omitted
|
||||
from the final dictionary.
|
||||
|
||||
:param \**kw: Optional, alternate key names for url attributes.
|
||||
|
||||
:param names: Deprecated. Same purpose as the keyword-based alternate
|
||||
names, but correlates the name to the original positionally.
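
        E.g., a minimal sketch using placeholder values::

            >>> from sqlalchemy.engine import make_url
            >>> url = make_url("postgresql+psycopg2://user:secret@host/dbname")
            >>> url.translate_connect_args(username="user", database="db")
            {'host': 'host', 'db': 'dbname', 'user': 'user', 'password': 'secret'}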
|
||||
"""
|
||||
|
||||
if names is not None:
|
||||
util.warn_deprecated(
|
||||
"The `URL.translate_connect_args.name`s parameter is "
|
||||
"deprecated. Please pass the "
|
||||
"alternate names as kw arguments.",
|
||||
"1.4",
|
||||
)
|
||||
|
||||
translated = {}
|
||||
attribute_names = ["host", "database", "username", "password", "port"]
|
||||
for sname in attribute_names:
|
||||
if names:
|
||||
name = names.pop(0)
|
||||
elif sname in kw:
|
||||
name = kw[sname]
|
||||
else:
|
||||
name = sname
|
||||
if name is not None and getattr(self, sname, False):
|
||||
if sname == "password":
|
||||
translated[name] = str(getattr(self, sname))
|
||||
else:
|
||||
translated[name] = getattr(self, sname)
|
||||
|
||||
return translated
|
||||
|
||||
|
||||
def make_url(name_or_url: Union[str, URL]) -> URL:
|
||||
"""Given a string, produce a new URL instance.
|
||||
|
||||
The format of the URL generally follows `RFC-1738
|
||||
<https://www.ietf.org/rfc/rfc1738.txt>`_, with some exceptions, including
|
||||
that underscores, and not dashes or periods, are accepted within the
|
||||
"scheme" portion.
|
||||
|
||||
If a :class:`.URL` object is passed, it is returned as is.
|
||||
|
||||
.. seealso::
|
||||
|
||||
:ref:`database_urls`
|
||||
|
||||
"""
|
||||
|
||||
if isinstance(name_or_url, str):
|
||||
return _parse_url(name_or_url)
|
||||
elif not isinstance(name_or_url, URL) and not hasattr(
|
||||
name_or_url, "_sqla_is_testing_if_this_is_a_mock_object"
|
||||
):
|
||||
raise exc.ArgumentError(
|
||||
f"Expected string or URL object, got {name_or_url!r}"
|
||||
)
|
||||
else:
|
||||
return name_or_url
|
||||
|
||||
|
||||
def _parse_url(name: str) -> URL:
|
||||
pattern = re.compile(
|
||||
r"""
|
||||
(?P<name>[\w\+]+)://
|
||||
(?:
|
||||
(?P<username>[^:/]*)
|
||||
(?::(?P<password>[^@]*))?
|
||||
@)?
|
||||
(?:
|
||||
(?:
|
||||
\[(?P<ipv6host>[^/\?]+)\] |
|
||||
(?P<ipv4host>[^/:\?]+)
|
||||
)?
|
||||
(?::(?P<port>[^/\?]*))?
|
||||
)?
|
||||
(?:/(?P<database>[^\?]*))?
|
||||
(?:\?(?P<query>.*))?
|
||||
""",
|
||||
re.X,
|
||||
)
|
||||
|
||||
m = pattern.match(name)
|
||||
if m is not None:
|
||||
components = m.groupdict()
|
||||
query: Optional[Dict[str, Union[str, List[str]]]]
|
||||
if components["query"] is not None:
|
||||
query = {}
|
||||
|
||||
for key, value in parse_qsl(components["query"]):
|
||||
if key in query:
|
||||
query[key] = util.to_list(query[key])
|
||||
cast("List[str]", query[key]).append(value)
|
||||
else:
|
||||
query[key] = value
|
||||
else:
|
||||
query = None
|
||||
components["query"] = query
|
||||
|
||||
if components["username"] is not None:
|
||||
components["username"] = unquote(components["username"])
|
||||
|
||||
if components["password"] is not None:
|
||||
components["password"] = unquote(components["password"])
|
||||
|
||||
ipv4host = components.pop("ipv4host")
|
||||
ipv6host = components.pop("ipv6host")
|
||||
components["host"] = ipv4host or ipv6host
|
||||
name = components.pop("name")
|
||||
|
||||
if components["port"]:
|
||||
components["port"] = int(components["port"])
|
||||
|
||||
return URL.create(name, **components) # type: ignore
|
||||
|
||||
else:
|
||||
raise exc.ArgumentError(
|
||||
"Could not parse SQLAlchemy URL from string '%s'" % name
|
||||
)
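

# A minimal end-to-end usage sketch, assuming an in-memory SQLite database
# purely for illustration; the helper below is hypothetical and is never
# called by the library itself.  It builds a URL programmatically, inspects
# it, and passes the URL object directly to ``create_engine()`` in place of
# a string.
def _example_programmatic_url() -> None:
    from sqlalchemy import create_engine

    url = URL.create(drivername="sqlite", database=":memory:")
    assert url.get_backend_name() == "sqlite"
    assert url.render_as_string() == "sqlite:///:memory:"

    # a URL object is accepted by create_engine() in place of a string
    engine = create_engine(url)
    with engine.connect() as conn:
        assert conn.exec_driver_sql("select 1").scalar_one() == 1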
|
166
elitebot/lib/python3.11/site-packages/sqlalchemy/engine/util.py
Normal file
|
@@ -0,0 +1,166 @@
|
|||
# engine/util.py
|
||||
# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors
|
||||
# <see AUTHORS file>
|
||||
#
|
||||
# This module is part of SQLAlchemy and is released under
|
||||
# the MIT License: https://www.opensource.org/licenses/mit-license.php
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import typing
|
||||
from typing import Any
|
||||
from typing import Callable
|
||||
from typing import Optional
|
||||
from typing import TypeVar
|
||||
|
||||
from .. import exc
|
||||
from .. import util
|
||||
from ..util._has_cy import HAS_CYEXTENSION
|
||||
from ..util.typing import Protocol
|
||||
|
||||
if typing.TYPE_CHECKING or not HAS_CYEXTENSION:
|
||||
from ._py_util import _distill_params_20 as _distill_params_20
|
||||
from ._py_util import _distill_raw_params as _distill_raw_params
|
||||
else:
|
||||
from sqlalchemy.cyextension.util import ( # noqa: F401
|
||||
_distill_params_20 as _distill_params_20,
|
||||
)
|
||||
from sqlalchemy.cyextension.util import ( # noqa: F401
|
||||
_distill_raw_params as _distill_raw_params,
|
||||
)
|
||||
|
||||
_C = TypeVar("_C", bound=Callable[[], Any])
|
||||
|
||||
|
||||
def connection_memoize(key: str) -> Callable[[_C], _C]:
|
||||
"""Decorator, memoize a function in a connection.info stash.
|
||||
|
||||
Only applicable to functions which take no arguments other than a
|
||||
connection. The memo will be stored in ``connection.info[key]``.
|
||||
"""
|
||||
|
||||
@util.decorator
|
||||
def decorated(fn, self, connection): # type: ignore
|
||||
connection = connection.connect()
|
||||
try:
|
||||
return connection.info[key]
|
||||
except KeyError:
|
||||
connection.info[key] = val = fn(self, connection)
|
||||
return val
|
||||
|
||||
return decorated
|
||||
|
||||
|
||||
class _TConsSubject(Protocol):
|
||||
_trans_context_manager: Optional[TransactionalContext]
|
||||
|
||||
|
||||
class TransactionalContext:
|
||||
"""Apply Python context manager behavior to transaction objects.
|
||||
|
||||
Performs validation to ensure the subject of the transaction is not
|
||||
used if the transaction were ended prematurely.
|
||||
|
||||
"""
|
||||
|
||||
__slots__ = ("_outer_trans_ctx", "_trans_subject", "__weakref__")
|
||||
|
||||
_trans_subject: Optional[_TConsSubject]
|
||||
|
||||
def _transaction_is_active(self) -> bool:
|
||||
raise NotImplementedError()
|
||||
|
||||
def _transaction_is_closed(self) -> bool:
|
||||
raise NotImplementedError()
|
||||
|
||||
def _rollback_can_be_called(self) -> bool:
|
||||
"""indicates the object is in a state that is known to be acceptable
|
||||
for rollback() to be called.
|
||||
|
||||
This does not necessarily mean rollback() will succeed or not raise
|
||||
an error, just that there is currently no state detected that indicates
|
||||
rollback() would fail or emit warnings.
|
||||
|
||||
It also does not mean that there's a transaction in progress, as
|
||||
it is usually safe to call rollback() even if no transaction is
|
||||
present.
|
||||
|
||||
.. versionadded:: 1.4.28
|
||||
|
||||
"""
|
||||
raise NotImplementedError()
|
||||
|
||||
def _get_subject(self) -> _TConsSubject:
|
||||
raise NotImplementedError()
|
||||
|
||||
def commit(self) -> None:
|
||||
raise NotImplementedError()
|
||||
|
||||
def rollback(self) -> None:
|
||||
raise NotImplementedError()
|
||||
|
||||
def close(self) -> None:
|
||||
raise NotImplementedError()
|
||||
|
||||
@classmethod
|
||||
def _trans_ctx_check(cls, subject: _TConsSubject) -> None:
|
||||
trans_context = subject._trans_context_manager
|
||||
if trans_context:
|
||||
if not trans_context._transaction_is_active():
|
||||
raise exc.InvalidRequestError(
|
||||
"Can't operate on closed transaction inside context "
|
||||
"manager. Please complete the context manager "
|
||||
"before emitting further commands."
|
||||
)
|
||||
|
||||
def __enter__(self) -> TransactionalContext:
|
||||
subject = self._get_subject()
|
||||
|
||||
# none for outer transaction, may be non-None for nested
|
||||
# savepoint, legacy nesting cases
|
||||
trans_context = subject._trans_context_manager
|
||||
self._outer_trans_ctx = trans_context
|
||||
|
||||
self._trans_subject = subject
|
||||
subject._trans_context_manager = self
|
||||
return self
|
||||
|
||||
def __exit__(self, type_: Any, value: Any, traceback: Any) -> None:
|
||||
subject = getattr(self, "_trans_subject", None)
|
||||
|
||||
# simplistically we could assume that
|
||||
# "subject._trans_context_manager is self". However, any calling
|
||||
# code that is manipulating __exit__ directly would break this
|
||||
# assumption. alembic context manager
|
||||
# is an example of partial use that just calls __exit__ and
|
||||
# not __enter__ at the moment. it's safe to assume this is being done
|
||||
# in the wild also
|
||||
out_of_band_exit = (
|
||||
subject is None or subject._trans_context_manager is not self
|
||||
)
|
||||
|
||||
if type_ is None and self._transaction_is_active():
|
||||
try:
|
||||
self.commit()
|
||||
except:
|
||||
with util.safe_reraise():
|
||||
if self._rollback_can_be_called():
|
||||
self.rollback()
|
||||
finally:
|
||||
if not out_of_band_exit:
|
||||
assert subject is not None
|
||||
subject._trans_context_manager = self._outer_trans_ctx
|
||||
self._trans_subject = self._outer_trans_ctx = None
|
||||
else:
|
||||
try:
|
||||
if not self._transaction_is_active():
|
||||
if not self._transaction_is_closed():
|
||||
self.close()
|
||||
else:
|
||||
if self._rollback_can_be_called():
|
||||
self.rollback()
|
||||
finally:
|
||||
if not out_of_band_exit:
|
||||
assert subject is not None
|
||||
subject._trans_context_manager = self._outer_trans_ctx
|
||||
self._trans_subject = self._outer_trans_ctx = None
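

# A small sketch of the context-manager protocol above, assuming an in-memory
# SQLite database purely for illustration; the helper below is hypothetical
# and is never called by the library itself.  The Transaction objects
# returned by Connection.begin() build on TransactionalContext, so leaving
# the "with" block without an exception commits, while an exception triggers
# a rollback.
def _example_transactional_context() -> None:
    from sqlalchemy import create_engine, text

    engine = create_engine("sqlite://")
    with engine.connect() as conn:
        # __enter__ registers this context as the connection's current
        # transaction context manager
        with conn.begin():
            conn.execute(text("CREATE TABLE t (x INTEGER)"))
            conn.execute(text("INSERT INTO t VALUES (1)"))
        # __exit__ saw no exception and an active transaction, so it committed
        assert conn.execute(text("SELECT COUNT(*) FROM t")).scalar_one() == 1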
|