Initial commit: Masina-Dock Vehicle Management System

This commit is contained in:
Iulian 2025-10-19 11:10:11 +01:00
commit ae923e2c41
4999 changed files with 1607266 additions and 0 deletions

View file

@@ -0,0 +1,18 @@
# connectors/__init__.py
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
from ..engine.interfaces import Dialect
class Connector(Dialect):
    """Mixin base for dialects whose DBAPI spans multiple backends.

    A connector adapts a single DBAPI so that it can be combined with
    dialects for entirely different databases; pyodbc is currently the
    only such mixin.
    """

View file

@@ -0,0 +1,184 @@
# connectors/aioodbc.py
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
# mypy: ignore-errors
from __future__ import annotations
from typing import TYPE_CHECKING
from .asyncio import AsyncAdapt_dbapi_connection
from .asyncio import AsyncAdapt_dbapi_cursor
from .asyncio import AsyncAdapt_dbapi_ss_cursor
from .asyncio import AsyncAdaptFallback_dbapi_connection
from .pyodbc import PyODBCConnector
from .. import pool
from .. import util
from ..util.concurrency import await_fallback
from ..util.concurrency import await_only
if TYPE_CHECKING:
from ..engine.interfaces import ConnectArgsType
from ..engine.url import URL
class AsyncAdapt_aioodbc_cursor(AsyncAdapt_dbapi_cursor):
    """aioodbc-specific cursor adapter."""

    __slots__ = ()

    def setinputsizes(self, *inputsizes):
        # aioodbc's own setinputsizes coroutine is broken
        # (https://github.com/aio-libs/aioodbc/issues/451), so reach
        # through to the wrapped pyodbc cursor and call its synchronous
        # implementation directly instead of
        # self.await_(self._cursor.setinputsizes(*inputsizes)).
        return self._cursor._impl.setinputsizes(*inputsizes)
class AsyncAdapt_aioodbc_ss_cursor(
    AsyncAdapt_aioodbc_cursor, AsyncAdapt_dbapi_ss_cursor
):
    """Server-side (unbuffered) variant of the aioodbc cursor adapter."""

    __slots__ = ()
class AsyncAdapt_aioodbc_connection(AsyncAdapt_dbapi_connection):
    """aioodbc-specific connection adapter."""

    _cursor_cls = AsyncAdapt_aioodbc_cursor
    _ss_cursor_cls = AsyncAdapt_aioodbc_ss_cursor
    __slots__ = ()

    @property
    def autocommit(self):
        return self._connection.autocommit

    @autocommit.setter
    def autocommit(self, value):
        # assigning to the aioodbc wrapper's own autocommit attribute
        # doesn't work (https://github.com/aio-libs/aioodbc/issues/448);
        # set it on the wrapped pyodbc connection instead.
        self._connection._conn.autocommit = value

    def ping(self, reconnect):
        return self.await_(self._connection.ping(reconnect))

    def add_output_converter(self, *arg, **kw):
        self._connection.add_output_converter(*arg, **kw)

    def character_set_name(self):
        return self._connection.character_set_name()

    def cursor(self, server_side=False):
        # a closed aioodbc connection sets connection=None and would
        # fail here with AttributeError; raise the same ProgrammingError
        # message that pyodbc uses so is_disconnect() recognizes it.
        if self._connection.closed:
            raise self.dbapi.ProgrammingError(
                "Attempt to use a closed connection."
            )
        return super().cursor(server_side=server_side)

    def rollback(self):
        # no-op on a closed connection; aioodbc sets connection=None
        # when closed and would fail with AttributeError otherwise
        if self._connection.closed:
            return
        super().rollback()

    def commit(self):
        # no-op on a closed connection, as in rollback()
        if self._connection.closed:
            return
        super().commit()

    def close(self):
        # no-op on a closed connection, as in rollback()
        if self._connection.closed:
            return
        super().close()
class AsyncAdaptFallback_aioodbc_connection(
    AsyncAdaptFallback_dbapi_connection, AsyncAdapt_aioodbc_connection
):
    """Fallback-mode (non-greenlet) variant of the aioodbc connection."""

    __slots__ = ()
class AsyncAdapt_aioodbc_dbapi:
    """Facade over the ``aioodbc`` and ``pyodbc`` modules presenting a
    synchronous :pep:`249`-style module interface.

    Exception classes, type-object constants, ``paramstyle`` and
    ``version`` are re-exported from ``pyodbc``; ``connect()`` produces
    adapted aioodbc connections.
    """

    def __init__(self, aioodbc, pyodbc):
        self.aioodbc = aioodbc
        self.pyodbc = pyodbc
        self.paramstyle = pyodbc.paramstyle
        self._init_dbapi_attributes()
        self.Cursor = AsyncAdapt_dbapi_cursor
        self.version = pyodbc.version

    def _init_dbapi_attributes(self):
        # re-export pyodbc's exception hierarchy and type constants on
        # this facade.  ("InterfaceError" was listed twice here;
        # the duplicate entry has been removed.)
        for name in (
            "Warning",
            "Error",
            "InterfaceError",
            "DataError",
            "DatabaseError",
            "OperationalError",
            "IntegrityError",
            "ProgrammingError",
            "InternalError",
            "NotSupportedError",
            "NUMBER",
            "STRING",
            "DATETIME",
            "BINARY",
            "Binary",
            "BinaryNull",
            "SQL_VARCHAR",
            "SQL_WVARCHAR",
        ):
            setattr(self, name, getattr(self.pyodbc, name))

    def connect(self, *arg, **kw):
        """Open an adapted aioodbc connection.

        ``async_fallback`` selects the non-greenlet fallback adapter;
        ``async_creator_fn`` substitutes a custom coroutine factory for
        ``aioodbc.connect``.
        """
        async_fallback = kw.pop("async_fallback", False)
        creator_fn = kw.pop("async_creator_fn", self.aioodbc.connect)

        if util.asbool(async_fallback):
            return AsyncAdaptFallback_aioodbc_connection(
                self,
                await_fallback(creator_fn(*arg, **kw)),
            )
        else:
            return AsyncAdapt_aioodbc_connection(
                self,
                await_only(creator_fn(*arg, **kw)),
            )
class aiodbcConnector(PyODBCConnector):
    """PyODBC connector variant that runs over the async ``aioodbc``
    driver.
    """

    is_async = True
    supports_statement_cache = True
    supports_server_side_cursors = True

    @classmethod
    def import_dbapi(cls):
        return AsyncAdapt_aioodbc_dbapi(
            __import__("aioodbc"), __import__("pyodbc")
        )

    def create_connect_args(self, url: URL) -> ConnectArgsType:
        positional, keyword = super().create_connect_args(url)
        # aioodbc accepts the ODBC connection string only as the "dsn"
        # keyword, so relocate any positional connection string there.
        if positional and positional[0]:
            keyword["dsn"] = positional[0]
        return (), keyword

    @classmethod
    def get_pool_class(cls, url):
        fallback = util.asbool(url.query.get("async_fallback", False))
        return (
            pool.FallbackAsyncAdaptedQueuePool
            if fallback
            else pool.AsyncAdaptedQueuePool
        )

    def get_driver_connection(self, connection):
        return connection._connection

View file

@@ -0,0 +1,429 @@
# connectors/asyncio.py
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
"""generic asyncio-adapted versions of DBAPI connection and cursor"""
from __future__ import annotations
import asyncio
import collections
import sys
from typing import Any
from typing import AsyncIterator
from typing import Deque
from typing import Iterator
from typing import NoReturn
from typing import Optional
from typing import Sequence
from typing import Tuple
from typing import Type
from typing import TYPE_CHECKING
from ..engine import AdaptedConnection
from ..util import EMPTY_DICT
from ..util.concurrency import await_fallback
from ..util.concurrency import await_only
from ..util.concurrency import in_greenlet
from ..util.typing import Protocol
if TYPE_CHECKING:
from ..engine.interfaces import _DBAPICursorDescription
from ..engine.interfaces import _DBAPIMultiExecuteParams
from ..engine.interfaces import _DBAPISingleExecuteParams
from ..engine.interfaces import DBAPIModule
from ..util.typing import Self
class AsyncIODBAPIConnection(Protocol):
    """protocol representing an async adapted version of a
    :pep:`249` database connection.

    Only the methods whose awaitability all async drivers agree on are
    declared; everything else is reached via the ``__getattr__`` hook.
    """

    # note that async DBAPIs dont agree if close() should be awaitable,
    # so it is omitted here and picked up by the __getattr__ hook below

    async def commit(self) -> None: ...

    def cursor(self, *args: Any, **kwargs: Any) -> AsyncIODBAPICursor: ...

    async def rollback(self) -> None: ...

    def __getattr__(self, key: str) -> Any: ...

    def __setattr__(self, key: str, value: Any) -> None: ...
class AsyncIODBAPICursor(Protocol):
    """protocol representing an async adapted version
    of a :pep:`249` database cursor.

    Coroutine methods mirror the pep-249 cursor interface; iteration is
    provided asynchronously via ``__aiter__``.
    """

    def __aenter__(self) -> Any: ...

    @property
    def description(
        self,
    ) -> _DBAPICursorDescription:
        """The description attribute of the Cursor."""
        ...

    @property
    def rowcount(self) -> int: ...

    arraysize: int

    lastrowid: int

    async def close(self) -> None: ...

    async def execute(
        self,
        operation: Any,
        parameters: Optional[_DBAPISingleExecuteParams] = None,
    ) -> Any: ...

    async def executemany(
        self,
        operation: Any,
        parameters: _DBAPIMultiExecuteParams,
    ) -> Any: ...

    async def fetchone(self) -> Optional[Any]: ...

    async def fetchmany(self, size: Optional[int] = ...) -> Sequence[Any]: ...

    async def fetchall(self) -> Sequence[Any]: ...

    async def setinputsizes(self, sizes: Sequence[Any]) -> None: ...

    def setoutputsize(self, size: Any, column: Any) -> None: ...

    async def callproc(
        self, procname: str, parameters: Sequence[Any] = ...
    ) -> Any: ...

    async def nextset(self) -> Optional[bool]: ...

    def __aiter__(self) -> AsyncIterator[Any]: ...
class AsyncAdapt_dbapi_module:
    """Typing stand-in for an adapted DBAPI module object.

    At runtime attribute access falls through ``__getattr__``; under
    TYPE_CHECKING the common exception classes are declared so that
    code referencing them type-checks.
    """

    if TYPE_CHECKING:
        Error = DBAPIModule.Error
        OperationalError = DBAPIModule.OperationalError
        InterfaceError = DBAPIModule.InterfaceError
        IntegrityError = DBAPIModule.IntegrityError

        def __getattr__(self, key: str) -> Any: ...
class AsyncAdapt_dbapi_cursor:
    """Synchronous-facing adapter around an asyncio DBAPI cursor.

    Coroutine methods of the wrapped cursor are driven to completion
    through the adapting connection's ``await_`` strategy.  For
    non-server-side cursors, all rows are pre-fetched at execute() time
    into a deque and then served synchronously from there.
    """

    server_side = False
    __slots__ = (
        "_adapt_connection",
        "_connection",
        "await_",
        "_cursor",
        "_rows",
        "_soft_closed_memoized",
    )

    # whether the driver's cursor.close() is a coroutine that must be
    # awaited; drivers whose close() is plain sync set this to False
    _awaitable_cursor_close: bool = True

    _cursor: AsyncIODBAPICursor
    _adapt_connection: AsyncAdapt_dbapi_connection
    _connection: AsyncIODBAPIConnection
    _rows: Deque[Any]

    def __init__(self, adapt_connection: AsyncAdapt_dbapi_connection):
        self._adapt_connection = adapt_connection
        self._connection = adapt_connection._connection
        self.await_ = adapt_connection.await_

        cursor = self._make_new_cursor(self._connection)
        self._cursor = self._aenter_cursor(cursor)
        # populated by _async_soft_close() with values (e.g.
        # "description") that must remain readable after the cursor
        # itself has been closed
        self._soft_closed_memoized = EMPTY_DICT

        if not self.server_side:
            self._rows = collections.deque()

    def _aenter_cursor(self, cursor: AsyncIODBAPICursor) -> AsyncIODBAPICursor:
        # drive the cursor's async context-manager entry synchronously
        return self.await_(cursor.__aenter__())  # type: ignore[no-any-return]

    def _make_new_cursor(
        self, connection: AsyncIODBAPIConnection
    ) -> AsyncIODBAPICursor:
        # hook for subclasses that need driver-specific cursor flavors
        return connection.cursor()

    @property
    def description(self) -> Optional[_DBAPICursorDescription]:
        # prefer the memoized value if the cursor was soft-closed
        if "description" in self._soft_closed_memoized:
            return self._soft_closed_memoized["description"]  # type: ignore[no-any-return] # noqa: E501
        return self._cursor.description

    @property
    def rowcount(self) -> int:
        return self._cursor.rowcount

    @property
    def arraysize(self) -> int:
        return self._cursor.arraysize

    @arraysize.setter
    def arraysize(self, value: int) -> None:
        self._cursor.arraysize = value

    @property
    def lastrowid(self) -> int:
        return self._cursor.lastrowid

    async def _async_soft_close(self) -> None:
        """close the cursor but keep the results pending, and memoize the
        description.

        .. versionadded:: 2.0.44
        """
        # only applies to buffered cursors whose close() is awaitable;
        # server-side cursors still need the live cursor for fetching
        if not self._awaitable_cursor_close or self.server_side:
            return
        self._soft_closed_memoized = self._soft_closed_memoized.union(
            {
                "description": self._cursor.description,
            }
        )
        await self._cursor.close()

    def close(self) -> None:
        self._rows.clear()

        # updated as of 2.0.44
        # try to "close" the cursor based on what we know about the driver
        # and if we are able to. otherwise, hope that the asyncio
        # extension called _async_soft_close() if the cursor is going into
        # a sync context
        if self._cursor is None or bool(self._soft_closed_memoized):
            return

        if not self._awaitable_cursor_close:
            self._cursor.close()  # type: ignore[unused-coroutine]
        elif in_greenlet():
            self.await_(self._cursor.close())

    def execute(
        self,
        operation: Any,
        parameters: Optional[_DBAPISingleExecuteParams] = None,
    ) -> Any:
        # route any driver error through the connection's exception
        # handler so the traceback is preserved
        try:
            return self.await_(self._execute_async(operation, parameters))
        except Exception as error:
            self._adapt_connection._handle_exception(error)

    def executemany(
        self,
        operation: Any,
        seq_of_parameters: _DBAPIMultiExecuteParams,
    ) -> Any:
        try:
            return self.await_(
                self._executemany_async(operation, seq_of_parameters)
            )
        except Exception as error:
            self._adapt_connection._handle_exception(error)

    async def _execute_async(
        self, operation: Any, parameters: Optional[_DBAPISingleExecuteParams]
    ) -> Any:
        # serialize statement execution per connection
        async with self._adapt_connection._execute_mutex:
            if parameters is None:
                result = await self._cursor.execute(operation)
            else:
                result = await self._cursor.execute(operation, parameters)

            # buffer the full result set eagerly for non-server-side
            # cursors so subsequent fetches don't need the event loop
            if self._cursor.description and not self.server_side:
                self._rows = collections.deque(await self._cursor.fetchall())
            return result

    async def _executemany_async(
        self,
        operation: Any,
        seq_of_parameters: _DBAPIMultiExecuteParams,
    ) -> Any:
        async with self._adapt_connection._execute_mutex:
            return await self._cursor.executemany(operation, seq_of_parameters)

    def nextset(self) -> None:
        self.await_(self._cursor.nextset())
        # re-buffer rows for the new result set, if any
        if self._cursor.description and not self.server_side:
            self._rows = collections.deque(
                self.await_(self._cursor.fetchall())
            )

    def setinputsizes(self, *inputsizes: Any) -> None:
        # NOTE: this is overrridden in aioodbc due to
        # see https://github.com/aio-libs/aioodbc/issues/451
        # right now
        return self.await_(self._cursor.setinputsizes(*inputsizes))

    def __enter__(self) -> Self:
        return self

    def __exit__(self, type_: Any, value: Any, traceback: Any) -> None:
        self.close()

    def __iter__(self) -> Iterator[Any]:
        # consume the buffered rows destructively
        while self._rows:
            yield self._rows.popleft()

    def fetchone(self) -> Optional[Any]:
        if self._rows:
            return self._rows.popleft()
        else:
            return None

    def fetchmany(self, size: Optional[int] = None) -> Sequence[Any]:
        if size is None:
            size = self.arraysize
        rr = self._rows
        return [rr.popleft() for _ in range(min(size, len(rr)))]

    def fetchall(self) -> Sequence[Any]:
        retval = list(self._rows)
        self._rows.clear()
        return retval
class AsyncAdapt_dbapi_ss_cursor(AsyncAdapt_dbapi_cursor):
    """Server-side cursor adapter: rows are pulled from the driver on
    demand rather than pre-buffered at execute() time.
    """

    __slots__ = ()
    server_side = True

    def close(self) -> None:
        cursor = self._cursor
        if cursor is None:
            return
        self.await_(cursor.close())
        self._cursor = None  # type: ignore

    def fetchone(self) -> Optional[Any]:
        return self.await_(self._cursor.fetchone())

    def fetchmany(self, size: Optional[int] = None) -> Any:
        return self.await_(self._cursor.fetchmany(size=size))

    def fetchall(self) -> Sequence[Any]:
        return self.await_(self._cursor.fetchall())

    def __iter__(self) -> Iterator[Any]:
        # drive the async iterator synchronously, one row at a time
        rows = self._cursor.__aiter__()
        while True:
            try:
                yield self.await_(rows.__anext__())
            except StopAsyncIteration:
                return
class AsyncAdapt_dbapi_connection(AdaptedConnection):
    """Synchronous-facing adapter around an asyncio DBAPI connection."""

    _cursor_cls = AsyncAdapt_dbapi_cursor
    _ss_cursor_cls = AsyncAdapt_dbapi_ss_cursor

    await_ = staticmethod(await_only)
    __slots__ = ("dbapi", "_execute_mutex")

    _connection: AsyncIODBAPIConnection

    def __init__(self, dbapi: Any, connection: AsyncIODBAPIConnection):
        self.dbapi = dbapi
        self._connection = connection
        # serializes execute()/executemany() calls on this connection
        self._execute_mutex = asyncio.Lock()

    def cursor(self, server_side: bool = False) -> AsyncAdapt_dbapi_cursor:
        cursor_cls = self._ss_cursor_cls if server_side else self._cursor_cls
        return cursor_cls(self)

    def execute(
        self,
        operation: Any,
        parameters: Optional[_DBAPISingleExecuteParams] = None,
    ) -> Any:
        """lots of DBAPIs seem to provide this, so include it"""
        cursor = self.cursor()
        cursor.execute(operation, parameters)
        return cursor

    def _handle_exception(self, error: Exception) -> NoReturn:
        # re-raise with the currently active traceback attached
        _, _, tb = sys.exc_info()
        raise error.with_traceback(tb)

    def rollback(self) -> None:
        try:
            self.await_(self._connection.rollback())
        except Exception as error:
            self._handle_exception(error)

    def commit(self) -> None:
        try:
            self.await_(self._connection.commit())
        except Exception as error:
            self._handle_exception(error)

    def close(self) -> None:
        self.await_(self._connection.close())
class AsyncAdaptFallback_dbapi_connection(AsyncAdapt_dbapi_connection):
    """Connection adapter that awaits via the fallback (non-greenlet)
    strategy.
    """

    __slots__ = ()

    await_ = staticmethod(await_fallback)
class AsyncAdapt_terminate:
    """Mixin for a AsyncAdapt_dbapi_connection to add terminate support.

    Subclasses implement :meth:`._terminate_graceful_close` and
    :meth:`._terminate_force_close`; this mixin chooses between them
    based on the calling context.
    """

    __slots__ = ()

    def terminate(self) -> None:
        if in_greenlet():
            # in a greenlet; this is the connection was invalidated case.
            try:
                # try to gracefully close; see #10717
                # shield the close from cancellation so a partially-run
                # close coroutine isn't abandoned mid-flight
                self.await_(asyncio.shield(self._terminate_graceful_close()))  # type: ignore[attr-defined] # noqa: E501
            except self._terminate_handled_exceptions() as e:
                # in the case where we are recycling an old connection
                # that may have already been disconnected, close() will
                # fail. In this case, terminate
                # the connection without any further waiting.
                # see issue #8419
                self._terminate_force_close()
                if isinstance(e, asyncio.CancelledError):
                    # re-raise CancelledError if we were cancelled
                    raise
        else:
            # not in a greenlet; this is the gc cleanup case
            self._terminate_force_close()

    def _terminate_handled_exceptions(self) -> Tuple[Type[BaseException], ...]:
        """Returns the exceptions that should be handled when
        calling _graceful_close.
        """
        return (asyncio.TimeoutError, asyncio.CancelledError, OSError)

    async def _terminate_graceful_close(self) -> None:
        """Try to close connection gracefully"""
        raise NotImplementedError

    def _terminate_force_close(self) -> None:
        """Terminate the connection"""
        raise NotImplementedError

View file

@@ -0,0 +1,250 @@
# connectors/pyodbc.py
# Copyright (C) 2005-2025 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
from __future__ import annotations
import re
import typing
from typing import Any
from typing import Dict
from typing import List
from typing import Optional
from typing import Tuple
from typing import Union
from urllib.parse import unquote_plus
from . import Connector
from .. import ExecutionContext
from .. import pool
from .. import util
from ..engine import ConnectArgsType
from ..engine import Connection
from ..engine import interfaces
from ..engine import URL
from ..sql.type_api import TypeEngine
if typing.TYPE_CHECKING:
from ..engine.interfaces import DBAPIModule
from ..engine.interfaces import IsolationLevel
class PyODBCConnector(Connector):
    """Connector mixin for dialects that communicate through pyodbc."""

    driver = "pyodbc"

    # this is no longer False for pyodbc in general
    supports_sane_rowcount_returning = True
    supports_sane_multi_rowcount = False

    supports_native_decimal = True
    default_paramstyle = "named"

    # when True, setinputsizes calls are skipped for executemany();
    # see do_set_input_sizes()
    fast_executemany = False

    # for non-DSN connections, this *may* be used to
    # hold the desired driver name
    pyodbc_driver_name: Optional[str] = None

    def __init__(self, use_setinputsizes: bool = False, **kw: Any):
        super().__init__(**kw)
        if use_setinputsizes:
            self.bind_typing = interfaces.BindTyping.SETINPUTSIZES

    @classmethod
    def import_dbapi(cls) -> DBAPIModule:
        return __import__("pyodbc")

    def create_connect_args(self, url: URL) -> ConnectArgsType:
        """Build the pyodbc connection string and connect keyword
        arguments from *url*.

        Returns a tuple ``((connection_string,), connect_args)``.  The
        raw string given via the ``odbc_connect`` query parameter wins
        outright; otherwise a DSN or DSN-less (DRIVER=...) connection
        string is assembled from the URL components, with any leftover
        query parameters passed through as ``key=value`` pairs.
        """
        opts = url.translate_connect_args(username="user")
        opts.update(url.query)

        keys = opts

        query = url.query

        connect_args: Dict[str, Any] = {}
        connectors: List[str]

        # these boolean options go to pyodbc.connect() as keyword
        # arguments, not into the connection string
        for param in ("ansi", "unicode_results", "autocommit"):
            if param in keys:
                connect_args[param] = util.asbool(keys.pop(param))

        if "odbc_connect" in keys:
            # fully formed, URL-encoded connection string supplied
            connectors = [unquote_plus(keys.pop("odbc_connect"))]
        else:

            def check_quote(token: str) -> str:
                # ODBC requires values containing ";" (or already
                # braced) to be wrapped in {} with "}" doubled
                if ";" in str(token) or str(token).startswith("{"):
                    token = "{%s}" % token.replace("}", "}}")
                return token

            keys = {k: check_quote(v) for k, v in keys.items()}

            # a bare host with no database is treated as a DSN name
            dsn_connection = "dsn" in keys or (
                "host" in keys and "database" not in keys
            )
            if dsn_connection:
                connectors = [
                    "dsn=%s" % (keys.pop("host", "") or keys.pop("dsn", ""))
                ]
            else:
                port = ""
                if "port" in keys and "port" not in query:
                    port = ",%d" % int(keys.pop("port"))

                connectors = []
                driver = keys.pop("driver", self.pyodbc_driver_name)
                if driver is None and keys:
                    # note if keys is empty, this is a totally blank URL
                    util.warn(
                        "No driver name specified; "
                        "this is expected by PyODBC when using "
                        "DSN-less connections"
                    )
                else:
                    connectors.append("DRIVER={%s}" % driver)

                connectors.extend(
                    [
                        "Server=%s%s" % (keys.pop("host", ""), port),
                        "Database=%s" % keys.pop("database", ""),
                    ]
                )

            user = keys.pop("user", None)
            if user:
                connectors.append("UID=%s" % user)
                pwd = keys.pop("password", "")
                if pwd:
                    connectors.append("PWD=%s" % pwd)
            else:
                authentication = keys.pop("authentication", None)
                if authentication:
                    connectors.append("Authentication=%s" % authentication)
                else:
                    # no user and no explicit authentication: use
                    # integrated Windows auth
                    connectors.append("Trusted_Connection=Yes")

            # if set to 'Yes', the ODBC layer will try to automagically
            # convert textual data from your database encoding to your
            # client encoding. This should obviously be set to 'No' if
            # you query a cp1253 encoded database from a latin1 client...
            if "odbc_autotranslate" in keys:
                connectors.append(
                    "AutoTranslate=%s" % keys.pop("odbc_autotranslate")
                )

            # pass any remaining query parameters straight through
            connectors.extend(["%s=%s" % (k, v) for k, v in keys.items()])

        return ((";".join(connectors),), connect_args)

    def is_disconnect(
        self,
        e: Exception,
        connection: Optional[
            Union[pool.PoolProxiedConnection, interfaces.DBAPIConnection]
        ],
        cursor: Optional[interfaces.DBAPICursor],
    ) -> bool:
        # pyodbc reports a closed connection via ProgrammingError with
        # one of these two messages
        if isinstance(e, self.loaded_dbapi.ProgrammingError):
            return "The cursor's connection has been closed." in str(
                e
            ) or "Attempt to use a closed connection." in str(e)
        else:
            return False

    def _dbapi_version(self) -> interfaces.VersionInfoType:
        # empty tuple when no DBAPI module has been loaded
        if not self.dbapi:
            return ()
        return self._parse_dbapi_version(self.dbapi.version)

    def _parse_dbapi_version(self, vers: str) -> interfaces.VersionInfoType:
        # parse e.g. "4.0.34" or "py3-4.0.34-beta" into a tuple of ints,
        # with a trailing string element for any suffix tag
        m = re.match(r"(?:py.*-)?([\d\.]+)(?:-(\w+))?", vers)
        if not m:
            return ()
        vers_tuple: interfaces.VersionInfoType = tuple(
            [int(x) for x in m.group(1).split(".")]
        )
        if m.group(2):
            vers_tuple += (m.group(2),)
        return vers_tuple

    def _get_server_version_info(
        self, connection: Connection
    ) -> interfaces.VersionInfoType:
        # NOTE: this function is not reliable, particularly when
        # freetds is in use. Implement database-specific server version
        # queries.
        dbapi_con = connection.connection.dbapi_connection
        version: Tuple[Union[int, str], ...] = ()
        r = re.compile(r"[.\-]")
        # non-numeric segments of the reported version are discarded
        for n in r.split(dbapi_con.getinfo(self.dbapi.SQL_DBMS_VER)):  # type: ignore[union-attr] # noqa: E501
            try:
                version += (int(n),)
            except ValueError:
                pass
        return tuple(version)

    def do_set_input_sizes(
        self,
        cursor: interfaces.DBAPICursor,
        list_of_tuples: List[Tuple[str, Any, TypeEngine[Any]]],
        context: ExecutionContext,
    ) -> None:
        # the rules for these types seems a little strange, as you can pass
        # non-tuples as well as tuples, however it seems to assume "0"
        # for the subsequent values if you don't pass a tuple which fails
        # for types such as pyodbc.SQL_WLONGVARCHAR, which is the datatype
        # that ticket #5649 is targeting.

        # NOTE: as of #6058, this won't be called if the use_setinputsizes
        # parameter were not passed to the dialect, or if no types were
        # specified in list_of_tuples

        # as of #8177 for 2.0 we assume use_setinputsizes=True and only
        # omit the setinputsizes calls for .executemany() with
        # fast_executemany=True

        if (
            context.execute_style is interfaces.ExecuteStyle.EXECUTEMANY
            and self.fast_executemany
        ):
            return

        # normalize bare type objects to (type, None, None) tuples
        cursor.setinputsizes(
            [
                (
                    (dbtype, None, None)
                    if not isinstance(dbtype, tuple)
                    else dbtype
                )
                for key, dbtype, sqltype in list_of_tuples
            ]
        )

    def get_isolation_level_values(
        self, dbapi_conn: interfaces.DBAPIConnection
    ) -> List[IsolationLevel]:
        # pyodbc supports AUTOCOMMIT in addition to the base levels
        return [*super().get_isolation_level_values(dbapi_conn), "AUTOCOMMIT"]

    def set_isolation_level(
        self,
        dbapi_connection: interfaces.DBAPIConnection,
        level: IsolationLevel,
    ) -> None:
        # adjust for ConnectionFairy being present
        # allows attribute set e.g. "connection.autocommit = True"
        # to work properly

        if level == "AUTOCOMMIT":
            dbapi_connection.autocommit = True
        else:
            dbapi_connection.autocommit = False
            super().set_isolation_level(dbapi_connection, level)

    def detect_autocommit_setting(
        self, dbapi_conn: interfaces.DBAPIConnection
    ) -> bool:
        return bool(dbapi_conn.autocommit)