Cleaned up the directories

parent f708506d68
commit a683fcffea

1340 changed files with 554582 additions and 6840 deletions
@@ -0,0 +1,88 @@
# dialects/mssql/__init__.py
# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
# mypy: ignore-errors

from . import aioodbc  # noqa
from . import base  # noqa
from . import pymssql  # noqa
from . import pyodbc  # noqa
from .base import BIGINT
from .base import BINARY
from .base import BIT
from .base import CHAR
from .base import DATE
from .base import DATETIME
from .base import DATETIME2
from .base import DATETIMEOFFSET
from .base import DECIMAL
from .base import DOUBLE_PRECISION
from .base import FLOAT
from .base import IMAGE
from .base import INTEGER
from .base import JSON
from .base import MONEY
from .base import NCHAR
from .base import NTEXT
from .base import NUMERIC
from .base import NVARCHAR
from .base import REAL
from .base import ROWVERSION
from .base import SMALLDATETIME
from .base import SMALLINT
from .base import SMALLMONEY
from .base import SQL_VARIANT
from .base import TEXT
from .base import TIME
from .base import TIMESTAMP
from .base import TINYINT
from .base import UNIQUEIDENTIFIER
from .base import VARBINARY
from .base import VARCHAR
from .base import XML
from ...sql import try_cast


base.dialect = dialect = pyodbc.dialect
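
# Illustrative note (not part of the original source): the assignment above
# makes pyodbc the default DBAPI for this dialect, so a URL without an
# explicit driver, e.g. "mssql://scott:tiger@host/db", resolves to the same
# dialect class as "mssql+pyodbc://scott:tiger@host/db".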

__all__ = (
    "JSON",
    "INTEGER",
    "BIGINT",
    "SMALLINT",
    "TINYINT",
    "VARCHAR",
    "NVARCHAR",
    "CHAR",
    "NCHAR",
    "TEXT",
    "NTEXT",
    "DECIMAL",
    "NUMERIC",
    "FLOAT",
    "DATETIME",
    "DATETIME2",
    "DATETIMEOFFSET",
    "DATE",
    "DOUBLE_PRECISION",
    "TIME",
    "SMALLDATETIME",
    "BINARY",
    "VARBINARY",
    "BIT",
    "REAL",
    "IMAGE",
    "TIMESTAMP",
    "ROWVERSION",
    "MONEY",
    "SMALLMONEY",
    "UNIQUEIDENTIFIER",
    "SQL_VARIANT",
    "XML",
    "dialect",
    "try_cast",
)
@@ -0,0 +1,64 @@
# dialects/mssql/aioodbc.py
# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
# mypy: ignore-errors
r"""
.. dialect:: mssql+aioodbc
    :name: aioodbc
    :dbapi: aioodbc
    :connectstring: mssql+aioodbc://<username>:<password>@<dsnname>
    :url: https://pypi.org/project/aioodbc/


Support for the SQL Server database in asyncio style, using the aioodbc
driver which itself is a thread-wrapper around pyodbc.

.. versionadded:: 2.0.23 Added the mssql+aioodbc dialect which builds
   on top of the pyodbc and general aio* dialect architecture.

Using a special asyncio mediation layer, the aioodbc dialect is usable
as the backend for the :ref:`SQLAlchemy asyncio <asyncio_toplevel>`
extension package.

Most behaviors and caveats for this driver are the same as that of the
pyodbc dialect used on SQL Server; see :ref:`mssql_pyodbc` for general
background.

This dialect should normally be used only with the
:func:`_asyncio.create_async_engine` engine creation function; connection
styles are otherwise equivalent to those documented in the pyodbc section::

    from sqlalchemy.ext.asyncio import create_async_engine

    engine = create_async_engine(
        "mssql+aioodbc://scott:tiger@mssql2017:1433/test?"
        "driver=ODBC+Driver+18+for+SQL+Server&TrustServerCertificate=yes"
    )
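
Once created, the engine is used with the asyncio connection API; a minimal
sketch (the coroutine name and query here are illustrative only)::

    from sqlalchemy import text

    async def get_version():
        async with engine.connect() as conn:
            result = await conn.execute(text("SELECT @@VERSION"))
            return result.scalar()
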
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from .pyodbc import MSDialect_pyodbc
|
||||
from .pyodbc import MSExecutionContext_pyodbc
|
||||
from ...connectors.aioodbc import aiodbcConnector
|
||||
|
||||
|
||||
class MSExecutionContext_aioodbc(MSExecutionContext_pyodbc):
|
||||
def create_server_side_cursor(self):
|
||||
return self._dbapi_connection.cursor(server_side=True)
|
||||
|
||||
|
||||
class MSDialectAsync_aioodbc(aiodbcConnector, MSDialect_pyodbc):
|
||||
driver = "aioodbc"
|
||||
|
||||
supports_statement_cache = True
|
||||
|
||||
execution_ctx_cls = MSExecutionContext_aioodbc
|
||||
|
||||
|
||||
dialect = MSDialectAsync_aioodbc
|
(File diff suppressed because it is too large)
@@ -0,0 +1,254 @@
# dialects/mssql/information_schema.py
# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
# mypy: ignore-errors

from ... import cast
from ... import Column
from ... import MetaData
from ... import Table
from ...ext.compiler import compiles
from ...sql import expression
from ...types import Boolean
from ...types import Integer
from ...types import Numeric
from ...types import NVARCHAR
from ...types import String
from ...types import TypeDecorator
from ...types import Unicode


ischema = MetaData()


class CoerceUnicode(TypeDecorator):
    impl = Unicode
    cache_ok = True

    def bind_expression(self, bindvalue):
        return _cast_on_2005(bindvalue)


class _cast_on_2005(expression.ColumnElement):
    def __init__(self, bindvalue):
        self.bindvalue = bindvalue


@compiles(_cast_on_2005)
def _compile(element, compiler, **kw):
    from . import base

    if (
        compiler.dialect.server_version_info is None
        or compiler.dialect.server_version_info < base.MS_2005_VERSION
    ):
        return compiler.process(element.bindvalue, **kw)
    else:
        return compiler.process(cast(element.bindvalue, Unicode), **kw)
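
# Illustrative note (not part of the original source): on SQL Server 2005
# or later, the hook above wraps the bound parameter in a CAST, so a
# reflection query parameter renders as e.g. "CAST(? AS NVARCHAR(max))"
# rather than a bare "?" (the exact length spelling depends on dialect
# settings); on older servers the parameter passes through unchanged.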

schemata = Table(
    "SCHEMATA",
    ischema,
    Column("CATALOG_NAME", CoerceUnicode, key="catalog_name"),
    Column("SCHEMA_NAME", CoerceUnicode, key="schema_name"),
    Column("SCHEMA_OWNER", CoerceUnicode, key="schema_owner"),
    schema="INFORMATION_SCHEMA",
)

tables = Table(
    "TABLES",
    ischema,
    Column("TABLE_CATALOG", CoerceUnicode, key="table_catalog"),
    Column("TABLE_SCHEMA", CoerceUnicode, key="table_schema"),
    Column("TABLE_NAME", CoerceUnicode, key="table_name"),
    Column("TABLE_TYPE", CoerceUnicode, key="table_type"),
    schema="INFORMATION_SCHEMA",
)

columns = Table(
    "COLUMNS",
    ischema,
    Column("TABLE_SCHEMA", CoerceUnicode, key="table_schema"),
    Column("TABLE_NAME", CoerceUnicode, key="table_name"),
    Column("COLUMN_NAME", CoerceUnicode, key="column_name"),
    Column("IS_NULLABLE", Integer, key="is_nullable"),
    Column("DATA_TYPE", String, key="data_type"),
    Column("ORDINAL_POSITION", Integer, key="ordinal_position"),
    Column(
        "CHARACTER_MAXIMUM_LENGTH", Integer, key="character_maximum_length"
    ),
    Column("NUMERIC_PRECISION", Integer, key="numeric_precision"),
    Column("NUMERIC_SCALE", Integer, key="numeric_scale"),
    Column("COLUMN_DEFAULT", Integer, key="column_default"),
    Column("COLLATION_NAME", String, key="collation_name"),
    schema="INFORMATION_SCHEMA",
)

mssql_temp_table_columns = Table(
    "COLUMNS",
    ischema,
    Column("TABLE_SCHEMA", CoerceUnicode, key="table_schema"),
    Column("TABLE_NAME", CoerceUnicode, key="table_name"),
    Column("COLUMN_NAME", CoerceUnicode, key="column_name"),
    Column("IS_NULLABLE", Integer, key="is_nullable"),
    Column("DATA_TYPE", String, key="data_type"),
    Column("ORDINAL_POSITION", Integer, key="ordinal_position"),
    Column(
        "CHARACTER_MAXIMUM_LENGTH", Integer, key="character_maximum_length"
    ),
    Column("NUMERIC_PRECISION", Integer, key="numeric_precision"),
    Column("NUMERIC_SCALE", Integer, key="numeric_scale"),
    Column("COLUMN_DEFAULT", Integer, key="column_default"),
    Column("COLLATION_NAME", String, key="collation_name"),
    schema="tempdb.INFORMATION_SCHEMA",
)

constraints = Table(
    "TABLE_CONSTRAINTS",
    ischema,
    Column("TABLE_SCHEMA", CoerceUnicode, key="table_schema"),
    Column("TABLE_NAME", CoerceUnicode, key="table_name"),
    Column("CONSTRAINT_NAME", CoerceUnicode, key="constraint_name"),
    Column("CONSTRAINT_TYPE", CoerceUnicode, key="constraint_type"),
    schema="INFORMATION_SCHEMA",
)

column_constraints = Table(
    "CONSTRAINT_COLUMN_USAGE",
    ischema,
    Column("TABLE_SCHEMA", CoerceUnicode, key="table_schema"),
    Column("TABLE_NAME", CoerceUnicode, key="table_name"),
    Column("COLUMN_NAME", CoerceUnicode, key="column_name"),
    Column("CONSTRAINT_NAME", CoerceUnicode, key="constraint_name"),
    schema="INFORMATION_SCHEMA",
)

key_constraints = Table(
    "KEY_COLUMN_USAGE",
    ischema,
    Column("TABLE_SCHEMA", CoerceUnicode, key="table_schema"),
    Column("TABLE_NAME", CoerceUnicode, key="table_name"),
    Column("COLUMN_NAME", CoerceUnicode, key="column_name"),
    Column("CONSTRAINT_NAME", CoerceUnicode, key="constraint_name"),
    Column("CONSTRAINT_SCHEMA", CoerceUnicode, key="constraint_schema"),
    Column("ORDINAL_POSITION", Integer, key="ordinal_position"),
    schema="INFORMATION_SCHEMA",
)

ref_constraints = Table(
    "REFERENTIAL_CONSTRAINTS",
    ischema,
    Column("CONSTRAINT_CATALOG", CoerceUnicode, key="constraint_catalog"),
    Column("CONSTRAINT_SCHEMA", CoerceUnicode, key="constraint_schema"),
    Column("CONSTRAINT_NAME", CoerceUnicode, key="constraint_name"),
    # TODO: is CATLOG misspelled ?
    Column(
        "UNIQUE_CONSTRAINT_CATLOG",
        CoerceUnicode,
        key="unique_constraint_catalog",
    ),
    Column(
        "UNIQUE_CONSTRAINT_SCHEMA",
        CoerceUnicode,
        key="unique_constraint_schema",
    ),
    Column(
        "UNIQUE_CONSTRAINT_NAME", CoerceUnicode, key="unique_constraint_name"
    ),
    Column("MATCH_OPTION", String, key="match_option"),
    Column("UPDATE_RULE", String, key="update_rule"),
    Column("DELETE_RULE", String, key="delete_rule"),
    schema="INFORMATION_SCHEMA",
)

views = Table(
    "VIEWS",
    ischema,
    Column("TABLE_CATALOG", CoerceUnicode, key="table_catalog"),
    Column("TABLE_SCHEMA", CoerceUnicode, key="table_schema"),
    Column("TABLE_NAME", CoerceUnicode, key="table_name"),
    Column("VIEW_DEFINITION", CoerceUnicode, key="view_definition"),
    Column("CHECK_OPTION", String, key="check_option"),
    Column("IS_UPDATABLE", String, key="is_updatable"),
    schema="INFORMATION_SCHEMA",
)

computed_columns = Table(
    "computed_columns",
    ischema,
    Column("object_id", Integer),
    Column("name", CoerceUnicode),
    Column("is_computed", Boolean),
    Column("is_persisted", Boolean),
    Column("definition", CoerceUnicode),
    schema="sys",
)

sequences = Table(
    "SEQUENCES",
    ischema,
    Column("SEQUENCE_CATALOG", CoerceUnicode, key="sequence_catalog"),
    Column("SEQUENCE_SCHEMA", CoerceUnicode, key="sequence_schema"),
    Column("SEQUENCE_NAME", CoerceUnicode, key="sequence_name"),
    schema="INFORMATION_SCHEMA",
)


class NumericSqlVariant(TypeDecorator):
    r"""This type casts sql_variant columns in the identity_columns view
    to numeric. This is required because:

    * pyodbc does not support sql_variant
    * pymssql under python 2 returns the byte representation of the number;
      int 1 is returned as "\x01\x00\x00\x00". On python 3 it returns the
      correct value as string.
    """

    impl = Unicode
    cache_ok = True

    def column_expression(self, colexpr):
        return cast(colexpr, Numeric(38, 0))


identity_columns = Table(
    "identity_columns",
    ischema,
    Column("object_id", Integer),
    Column("name", CoerceUnicode),
    Column("is_identity", Boolean),
    Column("seed_value", NumericSqlVariant),
    Column("increment_value", NumericSqlVariant),
    Column("last_value", NumericSqlVariant),
    Column("is_not_for_replication", Boolean),
    schema="sys",
)


class NVarcharSqlVariant(TypeDecorator):
    """This type casts sql_variant columns in the extended_properties view
    to nvarchar. This is required because pyodbc does not support sql_variant.
    """

    impl = Unicode
    cache_ok = True

    def column_expression(self, colexpr):
        return cast(colexpr, NVARCHAR)


extended_properties = Table(
    "extended_properties",
    ischema,
    Column("class", Integer),  # TINYINT
    Column("class_desc", CoerceUnicode),
    Column("major_id", Integer),
    Column("minor_id", Integer),
    Column("name", CoerceUnicode),
    Column("value", NVarcharSqlVariant),
    schema="sys",
)
@@ -0,0 +1,133 @@
# dialects/mssql/json.py
# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
# mypy: ignore-errors

from ... import types as sqltypes

# technically, all the dialect-specific datatypes that don't have any special
# behaviors would be private with names like _MSJson. However, we haven't been
# doing this for mysql.JSON or sqlite.JSON which both have JSON / JSONIndexType
# / JSONPathType in their json.py files, so keep consistent with that
# sub-convention for now. A future change can update them all to be
# package-private at once.


class JSON(sqltypes.JSON):
    """MSSQL JSON type.

    MSSQL supports JSON-formatted data as of SQL Server 2016.

    The :class:`_mssql.JSON` datatype at the DDL level will represent the
    datatype as ``NVARCHAR(max)``, but provides for JSON-level comparison
    functions as well as Python coercion behavior.

    :class:`_mssql.JSON` is used automatically whenever the base
    :class:`_types.JSON` datatype is used against a SQL Server backend.

    .. seealso::

        :class:`_types.JSON` - main documentation for the generic
        cross-platform JSON datatype.

    The :class:`_mssql.JSON` type supports persistence of JSON values
    as well as the core index operations provided by the :class:`_types.JSON`
    datatype, by adapting the operations to render the ``JSON_VALUE``
    or ``JSON_QUERY`` functions at the database level.

    The SQL Server :class:`_mssql.JSON` type necessarily makes use of the
    ``JSON_QUERY`` and ``JSON_VALUE`` functions when querying for elements
    of a JSON object. These two functions have a major restriction in that
    they are **mutually exclusive** based on the type of object to be returned.
    The ``JSON_QUERY`` function **only** returns a JSON dictionary or list,
    but not an individual string, numeric, or boolean element; the
    ``JSON_VALUE`` function **only** returns an individual string, numeric,
    or boolean element. **Both functions either return NULL or raise
    an error if they are not used against the correct expected value.**

    To handle this awkward requirement, indexed access rules are as follows:

    1. When extracting a sub element from a JSON that is itself a JSON
       dictionary or list, the :meth:`_types.JSON.Comparator.as_json` accessor
       should be used::

            stmt = select(
                data_table.c.data["some key"].as_json()
            ).where(
                data_table.c.data["some key"].as_json() == {"sub": "structure"}
            )

    2. When extracting a sub element from a JSON that is a plain boolean,
       string, integer, or float, use the appropriate method among
       :meth:`_types.JSON.Comparator.as_boolean`,
       :meth:`_types.JSON.Comparator.as_string`,
       :meth:`_types.JSON.Comparator.as_integer`,
       :meth:`_types.JSON.Comparator.as_float`::

            stmt = select(
                data_table.c.data["some key"].as_string()
            ).where(
                data_table.c.data["some key"].as_string() == "some string"
            )

    .. versionadded:: 1.4

    """

    # note there was a result processor here that was looking for "number",
    # but none of the tests seem to exercise it.


# Note: these objects currently match exactly those of MySQL, however since
# these are not generalizable to all JSON implementations, remain separately
# implemented for each dialect.
class _FormatTypeMixin:
    def _format_value(self, value):
        raise NotImplementedError()

    def bind_processor(self, dialect):
        super_proc = self.string_bind_processor(dialect)

        def process(value):
            value = self._format_value(value)
            if super_proc:
                value = super_proc(value)
            return value

        return process

    def literal_processor(self, dialect):
        super_proc = self.string_literal_processor(dialect)

        def process(value):
            value = self._format_value(value)
            if super_proc:
                value = super_proc(value)
            return value

        return process


class JSONIndexType(_FormatTypeMixin, sqltypes.JSON.JSONIndexType):
    def _format_value(self, value):
        if isinstance(value, int):
            value = "$[%s]" % value
        else:
            value = '$."%s"' % value
        return value


class JSONPathType(_FormatTypeMixin, sqltypes.JSON.JSONPathType):
    def _format_value(self, value):
        return "$%s" % (
            "".join(
                [
                    "[%s]" % elem if isinstance(elem, int) else '."%s"' % elem
                    for elem in value
                ]
            )
        )
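
# Illustrative note (not part of the original source): given the path
# sequence ["data", 5, "nested"], JSONPathType._format_value above would
# produce the JSON path string '$."data"[5]."nested"', which SQL Server
# accepts as the second argument to JSON_VALUE / JSON_QUERY.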
@@ -0,0 +1,155 @@
# dialects/mssql/provision.py
# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
# mypy: ignore-errors

from sqlalchemy import inspect
from sqlalchemy import Integer
from ... import create_engine
from ... import exc
from ...schema import Column
from ...schema import DropConstraint
from ...schema import ForeignKeyConstraint
from ...schema import MetaData
from ...schema import Table
from ...testing.provision import create_db
from ...testing.provision import drop_all_schema_objects_pre_tables
from ...testing.provision import drop_db
from ...testing.provision import generate_driver_url
from ...testing.provision import get_temp_table_name
from ...testing.provision import log
from ...testing.provision import normalize_sequence
from ...testing.provision import run_reap_dbs
from ...testing.provision import temp_table_keyword_args


@generate_driver_url.for_db("mssql")
def generate_driver_url(url, driver, query_str):
    backend = url.get_backend_name()

    new_url = url.set(drivername="%s+%s" % (backend, driver))

    if driver not in ("pyodbc", "aioodbc"):
        new_url = new_url.set(query="")

    if driver == "aioodbc":
        new_url = new_url.update_query_dict({"MARS_Connection": "Yes"})

    if query_str:
        new_url = new_url.update_query_string(query_str)

    try:
        new_url.get_dialect()
    except exc.NoSuchModuleError:
        return None
    else:
        return new_url
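
# Illustrative note (not part of the original source): given a base URL of
# "mssql://scott:tiger@host/db" and driver "pymssql", the hook above would
# return "mssql+pymssql://scott:tiger@host/db" with any pre-existing query
# options dropped, since only the pyodbc and aioodbc drivers keep theirs.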

@create_db.for_db("mssql")
def _mssql_create_db(cfg, eng, ident):
    with eng.connect().execution_options(isolation_level="AUTOCOMMIT") as conn:
        conn.exec_driver_sql("create database %s" % ident)
        conn.exec_driver_sql(
            "ALTER DATABASE %s SET ALLOW_SNAPSHOT_ISOLATION ON" % ident
        )
        conn.exec_driver_sql(
            "ALTER DATABASE %s SET READ_COMMITTED_SNAPSHOT ON" % ident
        )
        conn.exec_driver_sql("use %s" % ident)
        conn.exec_driver_sql("create schema test_schema")
        conn.exec_driver_sql("create schema test_schema_2")


@drop_db.for_db("mssql")
def _mssql_drop_db(cfg, eng, ident):
    with eng.connect().execution_options(isolation_level="AUTOCOMMIT") as conn:
        _mssql_drop_ignore(conn, ident)


def _mssql_drop_ignore(conn, ident):
    try:
        # typically when this happens, we can't KILL the session anyway,
        # so let the cleanup process drop the DBs
        # for row in conn.exec_driver_sql(
        #     "select session_id from sys.dm_exec_sessions "
        #     "where database_id=db_id('%s')" % ident):
        #     log.info("killing SQL server session %s", row['session_id'])
        #     conn.exec_driver_sql("kill %s" % row['session_id'])
        conn.exec_driver_sql("drop database %s" % ident)
        log.info("Reaped db: %s", ident)
        return True
    except exc.DatabaseError as err:
        log.warning("couldn't drop db: %s", err)
        return False


@run_reap_dbs.for_db("mssql")
def _reap_mssql_dbs(url, idents):
    log.info("db reaper connecting to %r", url)
    eng = create_engine(url)
    with eng.connect().execution_options(isolation_level="AUTOCOMMIT") as conn:
        log.info("identifiers in file: %s", ", ".join(idents))

        to_reap = conn.exec_driver_sql(
            "select d.name from sys.databases as d where name "
            "like 'TEST_%' and not exists (select session_id "
            "from sys.dm_exec_sessions "
            "where database_id=d.database_id)"
        )
        all_names = {dbname.lower() for (dbname,) in to_reap}
        to_drop = set()
        for name in all_names:
            if name in idents:
                to_drop.add(name)

        dropped = total = 0
        for total, dbname in enumerate(to_drop, 1):
            if _mssql_drop_ignore(conn, dbname):
                dropped += 1
        log.info(
            "Dropped %d out of %d stale databases detected", dropped, total
        )


@temp_table_keyword_args.for_db("mssql")
def _mssql_temp_table_keyword_args(cfg, eng):
    return {}


@get_temp_table_name.for_db("mssql")
def _mssql_get_temp_table_name(cfg, eng, base_name):
    return "##" + base_name


@drop_all_schema_objects_pre_tables.for_db("mssql")
def drop_all_schema_objects_pre_tables(cfg, eng):
    with eng.connect().execution_options(isolation_level="AUTOCOMMIT") as conn:
        inspector = inspect(conn)
        for schema in (None, "dbo", cfg.test_schema, cfg.test_schema_2):
            for tname in inspector.get_table_names(schema=schema):
                tb = Table(
                    tname,
                    MetaData(),
                    Column("x", Integer),
                    Column("y", Integer),
                    schema=schema,
                )
                for fk in inspect(conn).get_foreign_keys(tname, schema=schema):
                    conn.execute(
                        DropConstraint(
                            ForeignKeyConstraint(
                                [tb.c.x], [tb.c.y], name=fk["name"]
                            )
                        )
                    )


@normalize_sequence.for_db("mssql")
def normalize_sequence(cfg, sequence):
    if sequence.start is None:
        sequence.start = 1
    return sequence
@@ -0,0 +1,125 @@
# dialects/mssql/pymssql.py
# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
# mypy: ignore-errors


"""
.. dialect:: mssql+pymssql
    :name: pymssql
    :dbapi: pymssql
    :connectstring: mssql+pymssql://<username>:<password>@<freetds_name>/?charset=utf8

pymssql is a Python module that provides a Python DBAPI interface around
`FreeTDS <https://www.freetds.org/>`_.
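
For example, a minimal engine might be created as follows (the host and
database names here are placeholders)::

    from sqlalchemy import create_engine

    engine = create_engine(
        "mssql+pymssql://scott:tiger@freetds_host:1433/mydb?charset=utf8"
    )
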
.. versionchanged:: 2.0.5

    pymssql was restored to SQLAlchemy's continuous integration testing


"""  # noqa

import re

from .base import MSDialect
from .base import MSIdentifierPreparer
from ... import types as sqltypes
from ... import util
from ...engine import processors


class _MSNumeric_pymssql(sqltypes.Numeric):
    def result_processor(self, dialect, type_):
        if not self.asdecimal:
            return processors.to_float
        else:
            return sqltypes.Numeric.result_processor(self, dialect, type_)


class MSIdentifierPreparer_pymssql(MSIdentifierPreparer):
    def __init__(self, dialect):
        super().__init__(dialect)
        # pymssql has the very unusual behavior that it uses pyformat
        # yet does not require that percent signs be doubled
        self._double_percents = False


class MSDialect_pymssql(MSDialect):
    supports_statement_cache = True
    supports_native_decimal = True
    supports_native_uuid = True
    driver = "pymssql"

    preparer = MSIdentifierPreparer_pymssql

    colspecs = util.update_copy(
        MSDialect.colspecs,
        {sqltypes.Numeric: _MSNumeric_pymssql, sqltypes.Float: sqltypes.Float},
    )

    @classmethod
    def import_dbapi(cls):
        module = __import__("pymssql")
        # pymssql < 2.1.1 doesn't have a Binary method; we use string
        client_ver = tuple(int(x) for x in module.__version__.split("."))
        if client_ver < (2, 1, 1):
            # TODO: monkeypatching here is less than ideal
            module.Binary = lambda x: x if hasattr(x, "decode") else str(x)

        if client_ver < (1,):
            util.warn(
                "The pymssql dialect expects at least "
                "the 1.0 series of the pymssql DBAPI."
            )
        return module

    def _get_server_version_info(self, connection):
        vers = connection.exec_driver_sql("select @@version").scalar()
        m = re.match(r"Microsoft .*? - (\d+)\.(\d+)\.(\d+)\.(\d+)", vers)
        if m:
            return tuple(int(x) for x in m.group(1, 2, 3, 4))
        else:
            return None

    def create_connect_args(self, url):
        opts = url.translate_connect_args(username="user")
        opts.update(url.query)
        port = opts.pop("port", None)
        if port and "host" in opts:
            opts["host"] = "%s:%s" % (opts["host"], port)
        return ([], opts)
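
    # Illustrative note (not part of the original source): for a URL such as
    # "mssql+pymssql://scott:tiger@dbhost:1433/mydb", create_connect_args
    # above folds the port into the host, yielding keyword arguments like
    # {"user": "scott", "password": "tiger", "host": "dbhost:1433",
    # "database": "mydb"}, following the FreeTDS "host:port" convention.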

    def is_disconnect(self, e, connection, cursor):
        for msg in (
            "Adaptive Server connection timed out",
            "Net-Lib error during Connection reset by peer",
            "message 20003",  # connection timeout
            "Error 10054",
            "Not connected to any MS SQL server",
            "Connection is closed",
            "message 20006",  # Write to the server failed
            "message 20017",  # Unexpected EOF from the server
            "message 20047",  # DBPROCESS is dead or not enabled
        ):
            if msg in str(e):
                return True
        else:
            return False

    def get_isolation_level_values(self, dbapi_connection):
        return super().get_isolation_level_values(dbapi_connection) + [
            "AUTOCOMMIT"
        ]

    def set_isolation_level(self, dbapi_connection, level):
        if level == "AUTOCOMMIT":
            dbapi_connection.autocommit(True)
        else:
            dbapi_connection.autocommit(False)
            super().set_isolation_level(dbapi_connection, level)


dialect = MSDialect_pymssql
@@ -0,0 +1,745 @@
# dialects/mssql/pyodbc.py
# Copyright (C) 2005-2024 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
# mypy: ignore-errors

r"""
.. dialect:: mssql+pyodbc
    :name: PyODBC
    :dbapi: pyodbc
    :connectstring: mssql+pyodbc://<username>:<password>@<dsnname>
    :url: https://pypi.org/project/pyodbc/

Connecting to PyODBC
--------------------

The URL here is to be translated to PyODBC connection strings, as
detailed in `ConnectionStrings <https://code.google.com/p/pyodbc/wiki/ConnectionStrings>`_.

DSN Connections
^^^^^^^^^^^^^^^

A DSN connection in ODBC means that a pre-existing ODBC datasource is
configured on the client machine. The application then specifies the name
of this datasource, which encompasses details such as the specific ODBC driver
in use as well as the network address of the database. Assuming a datasource
is configured on the client, a basic DSN-based connection looks like::

    engine = create_engine("mssql+pyodbc://scott:tiger@some_dsn")

The above URL will pass the following connection string to PyODBC::

    DSN=some_dsn;UID=scott;PWD=tiger

If the username and password are omitted, the DSN form will also add
the ``Trusted_Connection=yes`` directive to the ODBC string.

Hostname Connections
^^^^^^^^^^^^^^^^^^^^

Hostname-based connections are also supported by pyodbc. These are often
easier to use than a DSN and have the additional advantage that the specific
database name to connect towards may be specified locally in the URL, rather
than it being fixed as part of a datasource configuration.

When using a hostname connection, the driver name must also be specified in the
query parameters of the URL. As these names usually have spaces in them, the
name must be URL encoded, which means using plus signs for spaces::

    engine = create_engine("mssql+pyodbc://scott:tiger@myhost:port/databasename?driver=ODBC+Driver+17+for+SQL+Server")

The ``driver`` keyword is significant to the pyodbc dialect and must be
specified in lowercase.

Any other names passed in the query string are passed through in the pyodbc
connect string, such as ``authentication``, ``TrustServerCertificate``, etc.
Multiple keyword arguments must be separated by an ampersand (``&``); these
will be translated to semicolons when the pyodbc connect string is generated
internally::

    e = create_engine(
        "mssql+pyodbc://scott:tiger@mssql2017:1433/test?"
        "driver=ODBC+Driver+18+for+SQL+Server&TrustServerCertificate=yes"
        "&authentication=ActiveDirectoryIntegrated"
    )

The equivalent URL can be constructed using :class:`_sa.engine.URL`::

    from sqlalchemy.engine import URL

    connection_url = URL.create(
        "mssql+pyodbc",
        username="scott",
        password="tiger",
        host="mssql2017",
        port=1433,
        database="test",
        query={
            "driver": "ODBC Driver 18 for SQL Server",
            "TrustServerCertificate": "yes",
            "authentication": "ActiveDirectoryIntegrated",
        },
    )


Pass through exact Pyodbc string
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

A PyODBC connection string can also be sent in pyodbc's format directly, as
specified in `the PyODBC documentation
<https://github.com/mkleehammer/pyodbc/wiki/Connecting-to-databases>`_,
using the parameter ``odbc_connect``. A :class:`_sa.engine.URL` object
can help make this easier::

    from sqlalchemy.engine import URL

    connection_string = "DRIVER={SQL Server Native Client 10.0};SERVER=dagger;DATABASE=test;UID=user;PWD=password"
    connection_url = URL.create("mssql+pyodbc", query={"odbc_connect": connection_string})

    engine = create_engine(connection_url)

.. _mssql_pyodbc_access_tokens:

Connecting to databases with access tokens
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

Some database servers are set up to only accept access tokens for login. For
example, SQL Server allows the use of Azure Active Directory tokens to connect
to databases. This requires creating a credential object using the
``azure-identity`` library. More information about the authentication step can be
found in `Microsoft's documentation
<https://docs.microsoft.com/en-us/azure/developer/python/azure-sdk-authenticate?tabs=bash>`_.

After getting an engine, the credentials need to be sent to ``pyodbc.connect``
each time a connection is requested. One way to do this is to set up an event
listener on the engine that adds the credential token to the dialect's connect
call. This is discussed more generally in :ref:`engines_dynamic_tokens`. For
SQL Server in particular, this is passed as an ODBC connection attribute with
a data structure `described by Microsoft
<https://docs.microsoft.com/en-us/sql/connect/odbc/using-azure-active-directory#authenticating-with-an-access-token>`_.

The following code snippet will create an engine that connects to an Azure SQL
database using Azure credentials::

    import struct

    from sqlalchemy import create_engine, event
    from sqlalchemy.engine.url import URL
    from azure import identity

    # Connection option for access tokens, as defined in msodbcsql.h
    SQL_COPT_SS_ACCESS_TOKEN = 1256
    # The token URL for any Azure SQL database
    TOKEN_URL = "https://database.windows.net/"

    connection_string = "mssql+pyodbc://@my-server.database.windows.net/myDb?driver=ODBC+Driver+17+for+SQL+Server"

    engine = create_engine(connection_string)

    azure_credentials = identity.DefaultAzureCredential()


    @event.listens_for(engine, "do_connect")
    def provide_token(dialect, conn_rec, cargs, cparams):
        # remove the "Trusted_Connection" parameter that SQLAlchemy adds
        cargs[0] = cargs[0].replace(";Trusted_Connection=Yes", "")

        # create token credential
        raw_token = azure_credentials.get_token(TOKEN_URL).token.encode(
            "utf-16-le"
        )
        token_struct = struct.pack(
            f"<I{len(raw_token)}s", len(raw_token), raw_token
        )

        # apply it to keyword arguments
        cparams["attrs_before"] = {SQL_COPT_SS_ACCESS_TOKEN: token_struct}

.. tip::

    The ``Trusted_Connection`` token is currently added by the SQLAlchemy
    pyodbc dialect when no username or password is present. This needs
    to be removed per Microsoft's
    `documentation for Azure access tokens
    <https://docs.microsoft.com/en-us/sql/connect/odbc/using-azure-active-directory#authenticating-with-an-access-token>`_,
    stating that a connection string when using an access token must not contain
    ``UID``, ``PWD``, ``Authentication`` or ``Trusted_Connection`` parameters.

.. _azure_synapse_ignore_no_transaction_on_rollback:

Avoiding transaction-related exceptions on Azure Synapse Analytics
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

Azure Synapse Analytics has a significant difference in its transaction
handling compared to plain SQL Server; in some cases an error within a Synapse
transaction can cause it to be arbitrarily terminated on the server side, which
then causes the DBAPI ``.rollback()`` method (as well as ``.commit()``) to
fail. The issue prevents the usual DBAPI contract of allowing ``.rollback()``
to pass silently if no transaction is present, as the driver does not expect
this condition. The symptom of this failure is an exception with a message
resembling 'No corresponding transaction found. (111214)' when attempting to
emit a ``.rollback()`` after an operation had a failure of some kind.

This specific case can be handled by passing ``ignore_no_transaction_on_rollback=True`` to
the SQL Server dialect via the :func:`_sa.create_engine` function as follows::

    engine = create_engine(connection_url, ignore_no_transaction_on_rollback=True)

Using the above parameter, the dialect will catch ``ProgrammingError``
exceptions raised during ``connection.rollback()`` and emit a warning
if the error message contains code ``111214``; however, it will not raise
an exception.

.. versionadded:: 1.4.40 Added the
   ``ignore_no_transaction_on_rollback=True`` parameter.

Enable autocommit for Azure SQL Data Warehouse (DW) connections
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

Azure SQL Data Warehouse does not support transactions,
and that can cause problems with SQLAlchemy's "autobegin" (and implicit
commit/rollback) behavior. We can avoid these problems by enabling autocommit
at both the pyodbc and engine levels::

    connection_url = sa.engine.URL.create(
        "mssql+pyodbc",
        username="scott",
        password="tiger",
        host="dw.azure.example.com",
        database="mydb",
        query={
            "driver": "ODBC Driver 17 for SQL Server",
            "autocommit": "True",
        },
    )

    engine = create_engine(connection_url).execution_options(
        isolation_level="AUTOCOMMIT"
    )

Avoiding sending large string parameters as TEXT/NTEXT
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^

By default, for historical reasons, Microsoft's ODBC drivers for SQL Server
send long string parameters (greater than 4000 SBCS characters or 2000 Unicode
characters) as TEXT/NTEXT values. TEXT and NTEXT have been deprecated for many
years and are starting to cause compatibility issues with newer versions of
SQL Server/Azure. For example, see `this
issue <https://github.com/mkleehammer/pyodbc/issues/835>`_.

Starting with ODBC Driver 18 for SQL Server we can override the legacy
behavior and pass long strings as varchar(max)/nvarchar(max) using the
``LongAsMax=Yes`` connection string parameter::

    connection_url = sa.engine.URL.create(
        "mssql+pyodbc",
        username="scott",
        password="tiger",
        host="mssqlserver.example.com",
        database="mydb",
        query={
            "driver": "ODBC Driver 18 for SQL Server",
            "LongAsMax": "Yes",
        },
    )


Pyodbc Pooling / connection close behavior
------------------------------------------

PyODBC uses internal `pooling
<https://github.com/mkleehammer/pyodbc/wiki/The-pyodbc-Module#pooling>`_ by
default, which means connections will be longer lived than they are within
SQLAlchemy itself. As SQLAlchemy has its own pooling behavior, it is often
preferable to disable this behavior. This behavior can only be disabled
globally at the PyODBC module level, **before** any connections are made::

    import pyodbc

    pyodbc.pooling = False

    # don't use the engine before pooling is set to False
    engine = create_engine("mssql+pyodbc://user:pass@dsn")

If this variable is left at its default value of ``True``, **the application
will continue to maintain active database connections**, even when the
SQLAlchemy engine itself fully discards a connection or if the engine is
disposed.

.. seealso::

    `pooling <https://github.com/mkleehammer/pyodbc/wiki/The-pyodbc-Module#pooling>`_ -
    in the PyODBC documentation.

Driver / Unicode Support
------------------------

PyODBC works best with Microsoft ODBC drivers, particularly in the area
of Unicode support on both Python 2 and Python 3.

Using the FreeTDS ODBC drivers on Linux or OSX with PyODBC is **not**
recommended; there have historically been many Unicode-related issues
in this area, including before Microsoft offered ODBC drivers for Linux
and OSX. Now that Microsoft offers drivers for all platforms, these are
the recommended choice for PyODBC. FreeTDS remains relevant for
non-ODBC drivers such as pymssql, where it works very well.


Rowcount Support
----------------

Previous limitations with the SQLAlchemy ORM's "versioned rows" feature with
Pyodbc have been resolved as of SQLAlchemy 2.0.5. See the notes at
:ref:`mssql_rowcount_versioning`.

.. _mssql_pyodbc_fastexecutemany:

Fast Executemany Mode
---------------------

The PyODBC driver includes support for a "fast executemany" mode of execution
which greatly reduces round trips for a DBAPI ``executemany()`` call when using
Microsoft ODBC drivers, for **limited size batches that fit in memory**. The
feature is enabled by setting the attribute ``.fast_executemany`` on the DBAPI
cursor when an executemany call is to be used. The SQLAlchemy PyODBC SQL
Server dialect supports this parameter by passing the
``fast_executemany`` parameter to
:func:`_sa.create_engine`, when using the **Microsoft ODBC driver only**::

    engine = create_engine(
        "mssql+pyodbc://scott:tiger@mssql2017:1433/test?driver=ODBC+Driver+17+for+SQL+Server",
        fast_executemany=True,
    )

.. versionchanged:: 2.0.9 - the ``fast_executemany`` parameter now has its
   intended effect of this PyODBC feature taking effect for all INSERT
   statements that are executed with multiple parameter sets, which don't
   include RETURNING. Previously, SQLAlchemy 2.0's :term:`insertmanyvalues`
   feature would cause ``fast_executemany`` to not be used in most cases
   even if specified.

.. versionadded:: 1.3

.. seealso::

    `fast executemany <https://github.com/mkleehammer/pyodbc/wiki/Features-beyond-the-DB-API#fast_executemany>`_
    - on github

.. _mssql_pyodbc_setinputsizes:

Setinputsizes Support
---------------------

As of version 2.0, the pyodbc ``cursor.setinputsizes()`` method is used for
all statement executions, except for ``cursor.executemany()`` calls when
``fast_executemany=True``, where it is not supported (assuming
:ref:`insertmanyvalues <engine_insertmanyvalues>` is kept enabled,
"fastexecutemany" will not take place for INSERT statements in any case).

The use of ``cursor.setinputsizes()`` can be disabled by passing
``use_setinputsizes=False`` to :func:`_sa.create_engine`.

When ``use_setinputsizes`` is left at its default of ``True``, the
specific per-type symbols passed to ``cursor.setinputsizes()`` can be
programmatically customized using the :meth:`.DialectEvents.do_setinputsizes`
hook. See that method for usage examples.
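
A sketch of such a hook (the handler name and the choice to skip
``SQL_WVARCHAR`` here are illustrative only)::

    from sqlalchemy import create_engine, event
    import pyodbc

    engine = create_engine(
        "mssql+pyodbc://scott:tiger@some_dsn", use_setinputsizes=True
    )


    @event.listens_for(engine, "do_setinputsizes")
    def _skip_wvarchar(inputsizes, cursor, statement, parameters, context):
        # remove setinputsizes entries for Unicode string parameters,
        # letting pyodbc determine those types on its own
        for bindparam, dbapitype in list(inputsizes.items()):
            if dbapitype is pyodbc.SQL_WVARCHAR:
                del inputsizes[bindparam]
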
.. versionchanged:: 2.0 The mssql+pyodbc dialect now defaults to using
   ``use_setinputsizes=True`` for all statement executions with the exception of
   cursor.executemany() calls when fast_executemany=True. The behavior can
   be turned off by passing ``use_setinputsizes=False`` to
   :func:`_sa.create_engine`.

"""  # noqa


import datetime
import decimal
import re
import struct

from .base import _MSDateTime
from .base import _MSUnicode
from .base import _MSUnicodeText
from .base import BINARY
from .base import DATETIMEOFFSET
from .base import MSDialect
from .base import MSExecutionContext
from .base import VARBINARY
from .json import JSON as _MSJson
from .json import JSONIndexType as _MSJsonIndexType
from .json import JSONPathType as _MSJsonPathType
from ... import exc
from ... import types as sqltypes
from ... import util
from ...connectors.pyodbc import PyODBCConnector
from ...engine import cursor as _cursor


class _ms_numeric_pyodbc:
    """Turns Decimals with adjusted() < 0 or > 7 into strings.

    The routines here are needed for older pyodbc versions
    as well as current mxODBC versions.

    """

    def bind_processor(self, dialect):
        super_process = super().bind_processor(dialect)

        if not dialect._need_decimal_fix:
            return super_process

        def process(value):
            if self.asdecimal and isinstance(value, decimal.Decimal):
                adjusted = value.adjusted()
                if adjusted < 0:
                    return self._small_dec_to_string(value)
                elif adjusted > 7:
                    return self._large_dec_to_string(value)

            if super_process:
                return super_process(value)
            else:
                return value

        return process

    # these routines needed for older versions of pyodbc.
    # as of 2.1.8 this logic is integrated.

    def _small_dec_to_string(self, value):
        return "%s0.%s%s" % (
            (value < 0 and "-" or ""),
            "0" * (abs(value.adjusted()) - 1),
            "".join([str(nint) for nint in value.as_tuple()[1]]),
        )

    def _large_dec_to_string(self, value):
        _int = value.as_tuple()[1]
        if "E" in str(value):
            result = "%s%s%s" % (
                (value < 0 and "-" or ""),
                "".join([str(s) for s in _int]),
                "0" * (value.adjusted() - (len(_int) - 1)),
            )
        else:
            if (len(_int) - 1) > value.adjusted():
                result = "%s%s.%s" % (
                    (value < 0 and "-" or ""),
                    "".join([str(s) for s in _int][0 : value.adjusted() + 1]),
                    "".join([str(s) for s in _int][value.adjusted() + 1 :]),
                )
            else:
                result = "%s%s" % (
                    (value < 0 and "-" or ""),
                    "".join([str(s) for s in _int][0 : value.adjusted() + 1]),
                )
        return result


class _MSNumeric_pyodbc(_ms_numeric_pyodbc, sqltypes.Numeric):
    pass


class _MSFloat_pyodbc(_ms_numeric_pyodbc, sqltypes.Float):
    pass


class _ms_binary_pyodbc:
    """Wraps binary values in dialect-specific Binary wrapper.

    If the value is null, return a pyodbc-specific BinaryNull
    object to prevent pyODBC [and FreeTDS] from defaulting binary
    NULL types to SQLWCHAR and causing implicit conversion errors.
    """

    def bind_processor(self, dialect):
        if dialect.dbapi is None:
            return None

        DBAPIBinary = dialect.dbapi.Binary

        def process(value):
            if value is not None:
                return DBAPIBinary(value)
            else:
                # pyodbc-specific
                return dialect.dbapi.BinaryNull

        return process


class _ODBCDateTimeBindProcessor:
    """Add bind processors to handle datetimeoffset behaviors"""

    has_tz = False

    def bind_processor(self, dialect):
        def process(value):
            if value is None:
                return None
            elif isinstance(value, str):
                # if a string was passed directly, allow it through
                return value
            elif not value.tzinfo or (not self.timezone and not self.has_tz):
                # for DateTime(timezone=False)
                return value
            else:
                # for DATETIMEOFFSET or DateTime(timezone=True)
                #
                # Convert to string format required by T-SQL
                dto_string = value.strftime("%Y-%m-%d %H:%M:%S.%f %z")
                # offset needs a colon, e.g., -0700 -> -07:00
                # "UTC offset in the form (+-)HHMM[SS[.ffffff]]"
                # backend currently rejects seconds / fractional seconds
                dto_string = re.sub(
                    r"([\+\-]\d{2})([\d\.]+)$", r"\1:\2", dto_string
                )
                return dto_string

        return process


class _ODBCDateTime(_ODBCDateTimeBindProcessor, _MSDateTime):
    pass


class _ODBCDATETIMEOFFSET(_ODBCDateTimeBindProcessor, DATETIMEOFFSET):
    has_tz = True


class _VARBINARY_pyodbc(_ms_binary_pyodbc, VARBINARY):
    pass


class _BINARY_pyodbc(_ms_binary_pyodbc, BINARY):
    pass


class _String_pyodbc(sqltypes.String):
    def get_dbapi_type(self, dbapi):
        if self.length in (None, "max") or self.length >= 2000:
            return (dbapi.SQL_VARCHAR, 0, 0)
        else:
            return dbapi.SQL_VARCHAR


class _Unicode_pyodbc(_MSUnicode):
    def get_dbapi_type(self, dbapi):
        if self.length in (None, "max") or self.length >= 2000:
            return (dbapi.SQL_WVARCHAR, 0, 0)
        else:
            return dbapi.SQL_WVARCHAR


class _UnicodeText_pyodbc(_MSUnicodeText):
    def get_dbapi_type(self, dbapi):
        if self.length in (None, "max") or self.length >= 2000:
            return (dbapi.SQL_WVARCHAR, 0, 0)
        else:
            return dbapi.SQL_WVARCHAR


class _JSON_pyodbc(_MSJson):
    def get_dbapi_type(self, dbapi):
        return (dbapi.SQL_WVARCHAR, 0, 0)


class _JSONIndexType_pyodbc(_MSJsonIndexType):
    def get_dbapi_type(self, dbapi):
        return dbapi.SQL_WVARCHAR


class _JSONPathType_pyodbc(_MSJsonPathType):
    def get_dbapi_type(self, dbapi):
        return dbapi.SQL_WVARCHAR


class MSExecutionContext_pyodbc(MSExecutionContext):
    _embedded_scope_identity = False

    def pre_exec(self):
        """where appropriate, issue "select scope_identity()" in the same
        statement.

        Background on why "scope_identity()" is preferable to "@@identity":
        https://msdn.microsoft.com/en-us/library/ms190315.aspx

        Background on why we attempt to embed "scope_identity()" into the same
        statement as the INSERT:
        https://code.google.com/p/pyodbc/wiki/FAQs#How_do_I_retrieve_autogenerated/identity_values?

        """

        super().pre_exec()

        # don't embed the scope_identity select into an
        # "INSERT .. DEFAULT VALUES"
        if (
            self._select_lastrowid
            and self.dialect.use_scope_identity
            and len(self.parameters[0])
        ):
            self._embedded_scope_identity = True

            self.statement += "; select scope_identity()"
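
        # Illustrative note (not part of the original source): for an INSERT
        # such as "INSERT INTO t (x) VALUES (?)", the statement sent to the
        # driver becomes
        # "INSERT INTO t (x) VALUES (?); select scope_identity()",
        # and post_exec() below retrieves the identity value from the
        # trailing result set.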

    def post_exec(self):
        if self._embedded_scope_identity:
            # Fetch the last inserted id from the manipulated statement
            # We may have to skip over a number of result sets with
            # no data (due to triggers, etc.)
            while True:
                try:
                    # fetchall() ensures the cursor is consumed
                    # without closing it (FreeTDS particularly)
                    rows = self.cursor.fetchall()
                except self.dialect.dbapi.Error:
                    # no way around this - nextset() consumes the previous set
                    # so we need to just keep flipping
                    self.cursor.nextset()
                else:
                    if not rows:
                        # async adapter drivers just return None here
                        self.cursor.nextset()
                        continue
                    row = rows[0]
                    break

            self._lastrowid = int(row[0])

            self.cursor_fetch_strategy = _cursor._NO_CURSOR_DML
        else:
            super().post_exec()


class MSDialect_pyodbc(PyODBCConnector, MSDialect):
    supports_statement_cache = True

    # note this parameter is no longer used by the ORM or default dialect
    # see #9414
    supports_sane_rowcount_returning = False

    execution_ctx_cls = MSExecutionContext_pyodbc

    colspecs = util.update_copy(
        MSDialect.colspecs,
        {
            sqltypes.Numeric: _MSNumeric_pyodbc,
            sqltypes.Float: _MSFloat_pyodbc,
            BINARY: _BINARY_pyodbc,
            # support DateTime(timezone=True)
            sqltypes.DateTime: _ODBCDateTime,
            DATETIMEOFFSET: _ODBCDATETIMEOFFSET,
            # SQL Server dialect has a VARBINARY that is just to support
            # "deprecate_large_types" w/ VARBINARY(max), but also we must
            # handle the usual SQL standard VARBINARY
            VARBINARY: _VARBINARY_pyodbc,
            sqltypes.VARBINARY: _VARBINARY_pyodbc,
            sqltypes.LargeBinary: _VARBINARY_pyodbc,
            sqltypes.String: _String_pyodbc,
            sqltypes.Unicode: _Unicode_pyodbc,
            sqltypes.UnicodeText: _UnicodeText_pyodbc,
            sqltypes.JSON: _JSON_pyodbc,
            sqltypes.JSON.JSONIndexType: _JSONIndexType_pyodbc,
            sqltypes.JSON.JSONPathType: _JSONPathType_pyodbc,
            # this excludes Enum from the string/VARCHAR thing for now
            # it looks like Enum's adaptation doesn't really support the
            # String type itself having a dialect-level impl
            sqltypes.Enum: sqltypes.Enum,
        },
    )

    def __init__(
        self,
        fast_executemany=False,
        use_setinputsizes=True,
        **params,
    ):
        super().__init__(use_setinputsizes=use_setinputsizes, **params)
        self.use_scope_identity = (
            self.use_scope_identity
            and self.dbapi
            and hasattr(self.dbapi.Cursor, "nextset")
        )
        self._need_decimal_fix = self.dbapi and self._dbapi_version() < (
            2,
            1,
            8,
        )
        self.fast_executemany = fast_executemany
        if fast_executemany:
            self.use_insertmanyvalues_wo_returning = False

    def _get_server_version_info(self, connection):
        try:
            # "Version of the instance of SQL Server, in the form
            # of 'major.minor.build.revision'"
            raw = connection.exec_driver_sql(
                "SELECT CAST(SERVERPROPERTY('ProductVersion') AS VARCHAR)"
            ).scalar()
        except exc.DBAPIError:
            # SQL Server docs indicate this function isn't present prior to
            # 2008. Before we had the VARCHAR cast above, pyodbc would also
            # fail on this query.
            return super()._get_server_version_info(connection)
        else:
            version = []
            r = re.compile(r"[.\-]")
            for n in r.split(raw):
                try:
                    version.append(int(n))
                except ValueError:
                    pass
            return tuple(version)

    def on_connect(self):
        super_ = super().on_connect()

        def on_connect(conn):
            if super_ is not None:
                super_(conn)

            self._setup_timestampoffset_type(conn)

        return on_connect

    def _setup_timestampoffset_type(self, connection):
        # output converter function for datetimeoffset
        def _handle_datetimeoffset(dto_value):
            tup = struct.unpack("<6hI2h", dto_value)
            return datetime.datetime(
                tup[0],
                tup[1],
                tup[2],
                tup[3],
                tup[4],
                tup[5],
                tup[6] // 1000,
                datetime.timezone(
                    datetime.timedelta(hours=tup[7], minutes=tup[8])
                ),
            )
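
        # Illustrative note (not part of the original source): the "<6hI2h"
        # layout above corresponds to six shorts (year, month, day, hour,
        # minute, second), an unsigned int holding fractional seconds in
        # nanoseconds (hence the // 1000 conversion to microseconds), and
        # two shorts for the timezone hour and minute offsets.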
        odbc_SQL_SS_TIMESTAMPOFFSET = -155  # as defined in SQLNCLI.h
        connection.add_output_converter(
            odbc_SQL_SS_TIMESTAMPOFFSET, _handle_datetimeoffset
        )

    def do_executemany(self, cursor, statement, parameters, context=None):
        if self.fast_executemany:
            cursor.fast_executemany = True
        super().do_executemany(cursor, statement, parameters, context=context)

    def is_disconnect(self, e, connection, cursor):
        if isinstance(e, self.dbapi.Error):
            code = e.args[0]
            if code in {
                "08S01",
                "01000",
                "01002",
                "08003",
                "08007",
                "08S02",
                "08001",
                "HYT00",
                "HY010",
                "10054",
            }:
                return True
        return super().is_disconnect(e, connection, cursor)


dialect = MSDialect_pyodbc