import ast
import itertools
import re
import types
from _typeshed import Incomplete
from collections import defaultdict
from collections.abc import Callable, Generator
from logging import Logger
from typing import Literal, NoReturn, TypeVar
from typing_extensions import Self, TypeAlias, deprecated

import pony as pony
from pony.orm.asttranslation import TranslationError as TranslationError
from pony.orm.dbapiprovider import (
    DatabaseError as DatabaseError,
    DataError as DataError,
    DBException as DBException,
    Error as Error,
    IntegrityError as IntegrityError,
    InterfaceError as InterfaceError,
    InternalError as InternalError,
    NotSupportedError as NotSupportedError,
    OperationalError as OperationalError,
    ProgrammingError as ProgrammingError,
    Warning as Warning,
)
from pony.orm.ormtypes import (
    Array,
    FloatArray as FloatArray,
    IntArray as IntArray,
    Json as Json,
    LongStr as LongStr,
    LongUnicode as LongUnicode,
    StrArray as StrArray,
    raw_sql as raw_sql,
)
from pony.py23compat import buffer as buffer, unicode as unicode
from pony.utils import between as between, coalesce as coalesce, concat as concat, localbase

# Generic placeholder used by identity-style helpers below (e.g. JOIN, unpickle_query).
_T = TypeVar("_T")
# Database backends Pony ships providers for; used by OnConnectDecorator and known_providers.
_KnownProvider: TypeAlias = Literal["sqlite", "postgres", "mysql", "oracle"]

# Public API of pony.orm.core, mirroring the runtime module's __all__.
# Names re-exported from sibling modules (dbapiprovider, ormtypes, utils, ...)
# are imported above with "as" aliases so type checkers treat them as re-exports.
__all__ = [
    "pony",
    "DBException",
    "RowNotFound",
    "MultipleRowsFound",
    "TooManyRowsFound",
    "Warning",
    "Error",
    "InterfaceError",
    "DatabaseError",
    "DataError",
    "OperationalError",
    "IntegrityError",
    "InternalError",
    "ProgrammingError",
    "NotSupportedError",
    "OrmError",
    "ERDiagramError",
    "DBSchemaError",
    "MappingError",
    "BindingError",
    "TableDoesNotExist",
    "TableIsNotEmpty",
    "ConstraintError",
    "CacheIndexError",
    "ObjectNotFound",
    "MultipleObjectsFoundError",
    "TooManyObjectsFoundError",
    "OperationWithDeletedObjectError",
    "TransactionError",
    "ConnectionClosedError",
    "TransactionIntegrityError",
    "IsolationError",
    "CommitException",
    "RollbackException",
    "UnrepeatableReadError",
    "OptimisticCheckError",
    "UnresolvableCyclicDependency",
    "UnexpectedError",
    "DatabaseSessionIsOver",
    "PonyRuntimeWarning",
    "DatabaseContainsIncorrectValue",
    "DatabaseContainsIncorrectEmptyValue",
    "TranslationError",
    "ExprEvalError",
    "PermissionError",
    "Database",
    "sql_debug",
    "set_sql_debug",
    "sql_debugging",
    "show",
    "PrimaryKey",
    "Required",
    "Optional",
    "Set",
    "Discriminator",
    "composite_key",
    "composite_index",
    "flush",
    "commit",
    "rollback",
    "db_session",
    "with_transaction",
    "make_proxy",
    "LongStr",
    "LongUnicode",
    "Json",
    "IntArray",
    "StrArray",
    "FloatArray",
    "select",
    "left_join",
    "get",
    "exists",
    "delete",
    "count",
    "sum",
    "min",
    "max",
    "avg",
    "group_concat",
    "distinct",
    "JOIN",
    "desc",
    "between",
    "concat",
    "coalesce",
    "raw_sql",
    "buffer",
    "unicode",
    "get_current_user",
    "set_current_user",
    "perm",
    "has_perm",
    "get_user_groups",
    "get_user_roles",
    "get_object_labels",
    "user_groups_getter",
    "user_roles_getter",
    "obj_labels_getter",
]

# When True, sql_debug()/set_sql_debug() calls are ignored (used internally by Pony).
suppress_debug_change: bool

# Legacy toggle for SQL statement logging; superseded by set_sql_debug().
def sql_debug(value: bool) -> None: ...

# Enable/disable SQL logging; show_values additionally logs query parameter values.
def set_sql_debug(debug: bool = True, show_values: bool | None = None) -> None: ...

# Loggers used for ORM diagnostics and raw SQL statements respectively.
orm_logger: Logger
sql_logger: Logger
orm_log_level: int

# Log msg to orm_logger at orm_log_level.
def log_orm(msg: object) -> None: ...

# Render positional or keyword query arguments for log output.
def args2str(args: list[object] | tuple[object, ...] | dict[object, object]) -> str: ...

# Root of Pony's ORM exception hierarchy and its schema/mapping/lookup subclasses.
class OrmError(Exception): ...
class ERDiagramError(OrmError): ...
class DBSchemaError(OrmError): ...
class MappingError(OrmError): ...
class BindingError(OrmError): ...
class TableDoesNotExist(OrmError): ...
class TableIsNotEmpty(OrmError): ...
class ConstraintError(OrmError): ...
class CacheIndexError(OrmError): ...
class RowNotFound(OrmError): ...
class MultipleRowsFound(OrmError): ...
class TooManyRowsFound(OrmError): ...
class PermissionError(OrmError): ...  # NOTE: shadows the builtin of the same name within this module

# Raised by Entity[...] / get() when no row matches the given primary key.
# Pony uses "exc" instead of "self" as the first parameter name at runtime.
class ObjectNotFound(OrmError):
    def __init__(
        exc, entity: Entity, pkval: object | tuple[object, ...] | None = None  # pkval is only passed to repr() for the message
    ) -> None: ...

class MultipleObjectsFoundError(OrmError): ...
class TooManyObjectsFoundError(OrmError): ...
class OperationWithDeletedObjectError(OrmError): ...

# Transaction/session-level failures.
class TransactionError(OrmError): ...
class ConnectionClosedError(TransactionError): ...

# Wraps a provider IntegrityError raised while flushing; original_exc keeps the cause.
class TransactionIntegrityError(TransactionError):
    def __init__(exc, msg, original_exc=None) -> None: ...

# Raised when commit fails across one or more databases; exceptions holds the sys.exc_info tuples.
class CommitException(TransactionError):
    def __init__(exc, msg, exceptions) -> None: ...

class PartialCommitException(TransactionError):
    def __init__(exc, msg, exceptions) -> None: ...

class RollbackException(TransactionError):
    def __init__(exc, msg, exceptions) -> None: ...

# Raised when an object is used after its db_session has ended.
class DatabaseSessionIsOver(TransactionError): ...

# Backward-compatible alias for DatabaseSessionIsOver.
TransactionRolledBack = DatabaseSessionIsOver

# Isolation violations detected by optimistic checks / repeatable reads.
class IsolationError(TransactionError): ...
class UnrepeatableReadError(IsolationError): ...
class OptimisticCheckError(IsolationError): ...
class UnresolvableCyclicDependency(TransactionError): ...

# Unexpected provider-level failure; original_exc keeps the cause.
class UnexpectedError(TransactionError):
    def __init__(exc, msg, original_exc) -> None: ...

# Raised when a Python expression inside a query cannot be evaluated;
# src is the expression source, cause the underlying exception.
class ExprEvalError(TranslationError):
    def __init__(exc, src, cause) -> None: ...

# Internal control-flow exceptions — not part of the public API.
class PonyInternalException(Exception): ...
class OptimizationFailed(PonyInternalException): ...

# Signals the query machinery to restart translation with a different translator.
class UseAnotherTranslator(PonyInternalException):
    translator: Incomplete
    def __init__(self, translator) -> None: ...

# Warnings emitted when database contents disagree with the entity declarations.
class PonyRuntimeWarning(RuntimeWarning): ...
class DatabaseContainsIncorrectValue(PonyRuntimeWarning): ...
class DatabaseContainsIncorrectEmptyValue(DatabaseContainsIncorrectValue): ...

# Context manager tracking which attributes/entities should be prefetched
# while a query runs (activated via Query.prefetch()).
class PrefetchContext:
    database: Incomplete
    attrs_to_prefetch_dict: Incomplete
    entities_to_prefetch: Incomplete
    relations_to_prefetch_cache: Incomplete
    def __init__(self, database=None) -> None: ...
    def copy(self): ...
    def __enter__(self) -> None: ...
    def __exit__(
        self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: types.TracebackType | None
    ) -> None: ...
    def get_frozen_attrs_to_prefetch(self, entity): ...
    def get_relations_to_prefetch(self, entity): ...

# Thread-local state (current db_session, debug flags, prefetch contexts).
# Pony names the first parameter "local" instead of "self".
class Local(localbase):
    def __init__(local) -> None: ...
    @property
    def prefetch_context(local): ...
    def push_debug_state(local, debug, show_values) -> None: ...
    def pop_debug_state(local) -> None: ...

# Module-wide thread-local instance.
local: Local

# Session-level operations acting on every database touched by the current db_session.
def flush() -> None: ...
def commit() -> None: ...
def rollback() -> None: ...

# Implementation of @db_session / "with db_session": usable both as a decorator
# (optionally with arguments) and as a context manager. Pony names the first
# parameter "db_session" instead of "self".
class DBSessionContextManager:
    __slots__ = (
        "retry",
        "retry_exceptions",
        "allowed_exceptions",
        "immediate",
        "ddl",
        "serializable",
        "strict",
        "optimistic",
        "sql_debug",
        "show_values",
    )
    retry: int                      # number of automatic retries on retry_exceptions
    ddl: bool                       # session may execute DDL; forbids nesting in a non-ddl session
    serializable: bool              # request SERIALIZABLE isolation
    immediate: bool                 # start the transaction immediately instead of lazily
    strict: bool                    # clear cache on exit; objects become unusable afterwards
    optimistic: bool                # enable optimistic concurrency checks
    retry_exceptions: tuple[type[Exception], ...]
    allowed_exceptions: tuple[type[Exception], ...]
    sql_debug: bool | None          # per-session override of the global SQL debug flag
    show_values: bool | None
    def __init__(
        db_session,
        retry: int = 0,
        immediate: bool = False,
        ddl: bool = False,
        serializable: bool = False,
        strict: bool = False,
        optimistic: bool = True,
        retry_exceptions: tuple[type[Exception], ...] = ...,
        allowed_exceptions: tuple[type[Exception], ...] = (),
        sql_debug: bool | None = None,
        show_values: bool | None = None,
    ) -> None: ...
    def __call__(db_session, *args, **kwargs): ...
    def __enter__(db_session) -> None: ...
    def __exit__(db_session, exc_type=None, exc=None, tb=None) -> None: ...

# Singleton used as @db_session / "with db_session:".
db_session: DBSessionContextManager

# Implementation of sql_debugging: temporarily toggles SQL logging as a
# decorator or context manager, restoring the previous state on exit.
class SQLDebuggingContextManager:
    debug: bool
    show_values: Incomplete
    def __init__(self, debug: bool = True, show_values: bool | None = None) -> None: ...
    def __call__(self, *args, **kwargs): ...
    def __enter__(self) -> None: ...
    def __exit__(self, exc_type=None, exc=None, tb=None) -> None: ...

# Singleton used as "with sql_debugging:" / @sql_debugging.
sql_debugging: SQLDebuggingContextManager

# Raise DatabaseSessionIsOver describing the attempted action on obj/attr.
def throw_db_session_is_over(action: str, obj: Entity, attr: Attribute | None = None) -> NoReturn: ...
@deprecated("@with_transaction decorator is deprecated, use @db_session decorator instead.")
def with_transaction(*args, **kwargs): ...

# Provider names accepted by Database.bind()/on_connect().
known_providers: tuple[_KnownProvider, ...]

# Decorator factory returned by Database.on_connect; registers callbacks to run
# on every new connection, optionally restricted to one provider.
class OnConnectDecorator:
    @staticmethod
    def check_provider(provider: str | None) -> None: ...  # raises on unknown provider names
    provider: _KnownProvider | None
    database: Incomplete
    def __init__(self, database: Database, provider: str | None) -> None: ...
    def __call__(self, func: types.FunctionType | None = None, provider: str | None = None) -> Self: ...

# Monotonic source of Database.id values.
db_id_counter: itertools.count[int]

# Central object: holds the provider/connection pool, the set of declared
# entities, and the generated schema. Methods use "database" as the first
# parameter name, following Pony's runtime convention.
class Database:
    def __deepcopy__(self, memo) -> Self: ...  # Database instances are not copied; returns self
    id: Incomplete
    priority: int
    entities: Incomplete
    schema: Incomplete
    Entity: type[Entity]  # per-database base class to derive entities from
    on_connect: OnConnectDecorator
    provider: Incomplete
    def __init__(self, *args, **kwargs) -> None: ...  # args, if given, are forwarded to bind()
    def call_on_connect(database, con) -> None: ...
    def bind(self, *args, **kwargs) -> None: ...
    @property
    def last_sql(database): ...
    @property
    def local_stats(database): ...
    def merge_local_stats(database) -> None: ...
    @property
    def global_stats(database): ...
    @property
    @deprecated("global_stats_lock is deprecated, just use global_stats property without any locking.")
    def global_stats_lock(database): ...
    def get_connection(database): ...
    def disconnect(database) -> None: ...
    def flush(database) -> None: ...
    def commit(database) -> None: ...
    def rollback(database) -> None: ...
    # Raw-SQL helpers: globals/locals provide values for $-parameters in sql.
    def execute(database, sql, globals=None, locals=None): ...
    def select(database, sql, globals=None, locals=None, frame_depth: int = 0): ...
    def get(database, sql, globals=None, locals=None): ...
    def exists(database, sql, globals=None, locals=None): ...
    def insert(database, table_name, returning=None, **kwargs): ...
    # Schema management.
    def generate_mapping(database, filename=None, check_tables: bool = True, create_tables: bool = False): ...
    def drop_table(database, table_name, if_exists: bool = False, with_all_data: bool = False) -> None: ...
    def drop_all_tables(database, with_all_data: bool = False) -> None: ...
    def create_tables(database, check_tables: bool = False) -> None: ...
    def check_tables(database) -> None: ...
    def set_perms_for(database, *entities) -> Generator[None]: ...
    def to_json(database, data, include=(), exclude=(), converter=None, with_schema: bool = True, schema_hash=None): ...
    def from_json(database, changes, observer=None): ...

# Default converter used by to_json serialization.
def basic_converter(x): ...

# Declare a permission rule; used as "with db.set_perms_for(...): perm(...)".
def perm(*args, **kwargs) -> AccessRule: ...
def pop_names_from_kwargs(typename, kwargs, *kwnames): ...

# A single permission rule binding entities to permissions/groups/roles/labels.
class AccessRule:
    def __init__(rule, database, entities, permissions, groups, roles, labels) -> None: ...
    def exclude(rule, *args) -> None: ...

# Permission queries evaluated against the registered AccessRules.
def has_perm(user, perm, x) -> bool: ...
def can_view(user, x) -> bool: ...
def can_edit(user, x) -> bool: ...
def can_create(user, x) -> bool: ...
def can_delete(user, x) -> bool: ...
def get_current_user(): ...
def set_current_user(user) -> None: ...

# Default group set applied to every user ({"anybody"}-style constant).
anybody_frozenset: frozenset[str]

def get_user_groups(user): ...
def get_user_roles(user, obj): ...
def get_object_labels(obj): ...

# Registered user-group getter callbacks (see user_groups_getter below).
usergroup_functions: list[Incomplete]

# Decorator registering a function that returns the groups of a user (of class cls).
def user_groups_getter(cls=None): ...

# Registered user-role getter callbacks.
userrole_functions: list[Incomplete]

# Decorator registering a function that returns the roles of user_cls w.r.t. obj_cls.
def user_roles_getter(user_cls=None, obj_cls=None): ...

# Registered object-label getter callbacks.
objlabel_functions: list[Incomplete]

# Decorator registering a function that returns labels for objects of class cls.
def obj_labels_getter(cls=None): ...

# Thread-local query statistics storage ("dblocal" is Pony's self-name here).
class DbLocal(localbase):
    stats: Incomplete
    last_sql: Incomplete
    def __init__(dblocal) -> None: ...

# Aggregated execution statistics for one SQL statement (count, min/max/avg time).
class QueryStat:
    def __init__(stat, sql, duration=None) -> None: ...
    def copy(stat): ...
    def query_executed(stat, duration) -> None: ...  # fold one more execution into the stats
    def merge(stat, stat2) -> None: ...              # combine stats for the same sql text
    @property
    def avg_time(stat): ...

# Monotonic source of SessionCache.num values.
num_counter: itertools.count[int]

# Identity map + unit of work for one database within one db_session:
# tracks loaded objects, dirty objects, indexes, and the open connection.
# The "| None" attribute types reflect that caches are dropped on release.
class SessionCache:
    is_alive: bool
    num: int
    database: Database
    objects: set[Incomplete]
    indexes: defaultdict[Incomplete, dict[Incomplete, Incomplete]] | None   # attr -> {value: obj}
    seeds: defaultdict[Incomplete, set[Incomplete]] | None                  # partially-loaded objects per pk attr
    max_id_cache: dict[Incomplete, Incomplete] | None
    collection_statistics: dict[Incomplete, Incomplete] | None
    for_update: set[Incomplete] | None
    noflush_counter: int            # >0 while flushing is temporarily disabled
    modified_collections: defaultdict[Incomplete, set[Incomplete]] | None
    objects_to_save: list[Incomplete] | None
    saved_objects: list[Incomplete] | None
    query_results: dict[Incomplete, Incomplete] | None
    dbvals_deduplication_cache: defaultdict[Incomplete, dict[Incomplete, Incomplete]] | None
    modified: bool
    db_session: Incomplete
    immediate: bool
    connection: Incomplete
    in_transaction: bool
    saved_fk_state: Incomplete
    perm_cache: Incomplete
    user_roles_cache: defaultdict[Incomplete, dict[Incomplete, Incomplete]] | None
    obj_labels_cache: dict[Incomplete, Incomplete] | None
    def __init__(cache, database: Database) -> None: ...
    def connect(cache): ...
    def reconnect(cache, exc): ...
    def prepare_connection_for_query_execution(cache): ...
    def flush_and_commit(cache) -> None: ...
    def commit(cache) -> None: ...
    def rollback(cache) -> None: ...
    def release(cache) -> None: ...
    def close(cache, rollback: bool = True) -> None: ...
    def flush_disabled(cache) -> Generator[None]: ...  # context manager suspending flushes
    def flush(cache) -> None: ...
    def call_after_save_hooks(cache) -> None: ...
    # Keep in-memory unique indexes in sync with attribute changes (db_* variants
    # apply values read back from the database).
    def update_simple_index(cache, obj, attr, old_val, new_val, undo) -> None: ...
    def db_update_simple_index(cache, obj, attr, old_dbval, new_dbval) -> None: ...
    def update_composite_index(cache, obj, attrs, prev_vals, new_vals, undo) -> None: ...
    def db_update_composite_index(cache, obj, attrs, prev_vals, new_vals) -> None: ...

# Sentinel singletons distinguishing "not loaded yet" and "use declared default"
# from real attribute values (including None).
class NotLoadedValueType: ...

NOT_LOADED: NotLoadedValueType

class DefaultValueType: ...

DEFAULT: DefaultValueType

# Marker produced by Attribute.desc, consumed by order_by()/sort_by().
class DescWrapper:
    attr: Attribute
    def __init__(self, attr: Attribute) -> None: ...
    def __call__(self) -> Self: ...  # desc(attr)() is accepted as well
    def __eq__(self, other: object) -> bool: ...
    def __ne__(self, other: object) -> bool: ...
    def __hash__(self) -> int: ...

# Monotonic source of Attribute.id values (declaration order).
attr_id_counter: itertools.count[int]

# Descriptor implementing a single entity attribute (column or relationship).
# Subclasses: Optional, Required, Discriminator, PrimaryKey, Collection/Set.
# Pony names the first parameter "attr" instead of "self".
class Attribute:
    __slots__ = (
        "nullable",
        "is_required",
        "is_discriminator",
        "is_unique",
        "is_part_of_unique_index",
        "is_pk",
        "is_collection",
        "is_relation",
        "is_basic",
        "is_string",
        "is_volatile",
        "is_implicit",
        "id",
        "pk_offset",
        "pk_columns_offset",
        "py_type",
        "sql_type",
        "entity",
        "name",
        "lazy",
        "lazy_sql_cache",
        "args",
        "auto",
        "default",
        "reverse",
        "composite_keys",
        "column",
        "columns",
        "col_paths",
        "_columns_checked",
        "converters",
        "kwargs",
        "cascade_delete",
        "index",
        "reverse_index",
        "original_default",
        "sql_default",
        "py_check",
        "hidden",
        "optimistic",
        "fk_name",
        "type_has_empty_value",
        "interleave",
    )
    # Flags classifying the attribute (set during entity construction/mapping).
    nullable: bool | None
    is_required: bool
    is_discriminator: bool
    is_unique: bool | None
    is_part_of_unique_index: bool | None
    is_pk: bool
    is_collection: bool
    is_relation: bool
    is_basic: bool
    is_string: bool
    is_volatile: bool
    is_implicit: bool
    id: int                     # declaration-order id from attr_id_counter
    pk_offset: int | None       # position inside a composite primary key, if any
    pk_columns_offset: int
    py_type: type | str | types.FunctionType | Array  # may be a forward-referenced entity name
    sql_type: Incomplete
    entity: Incomplete
    name: Incomplete
    lazy: bool
    lazy_sql_cache: Incomplete
    args: tuple[Incomplete, ...]
    auto: bool
    default: Incomplete
    reverse: str | Attribute | None  # reverse side of a relationship, resolved during mapping
    composite_keys: list[tuple[Incomplete, int]]
    column: str | None
    columns: list[str] | tuple[str, ...]
    col_paths: list[Incomplete]
    converters: list[Incomplete]
    kwargs: dict[str, Incomplete]
    cascade_delete: bool | None
    index: str | bool | None
    reverse_index: Incomplete
    original_default: Incomplete
    sql_default: str | bool | None
    py_check: Callable[..., bool] | None
    hidden: bool
    optimistic: bool | None
    fk_name: str | None
    type_has_empty_value: bool
    interleave: bool | None
    def __deepcopy__(attr, memo): ...
    def __init__(attr, py_type: type | str | types.FunctionType | Array, *args, **kwargs) -> None: ...
    def linked(attr) -> None: ...  # called once both sides of a relation are resolved
    def __lt__(attr, other): ...   # attributes order by declaration id
    def validate(attr, val, obj=None, entity=None, from_db: bool = False): ...
    def parse_value(attr, row, offsets, dbvals_deduplication_cache): ...
    def load(attr, obj: Entity): ...
    # Descriptor protocol: __get__/__set__/__delete__ implement obj.attr access.
    def __get__(attr, obj, cls=None): ...
    def get(attr, obj): ...
    def __set__(attr, obj, new_val, undo_funcs=None) -> None: ...
    def db_set(attr, obj, new_dbval, is_reverse_call: bool = False) -> None: ...
    def update_reverse(attr, obj, old_val, new_val, undo_funcs) -> None: ...
    def db_update_reverse(attr, obj, old_dbval, new_dbval) -> None: ...
    def __delete__(attr, obj) -> None: ...
    def get_raw_values(attr, val): ...
    def get_columns(attr) -> list[str] | tuple[str, ...]: ...
    @property
    def asc(attr) -> Self: ...          # ordering helpers for order_by()
    @property
    def desc(attr) -> DescWrapper: ...
    def describe(attr) -> str: ...

# Nullable attribute.
class Optional(Attribute):
    __slots__: list[str] = []

# Non-nullable attribute; validate() additionally rejects empty values.
class Required(Attribute):
    __slots__: list[str] = []
    def validate(attr, val, obj=None, entity=None, from_db: bool = False): ...

# Attribute distinguishing subclasses in single-table inheritance;
# code2cls maps discriminator values to entity classes.
class Discriminator(Required):
    __slots__ = ["code2cls"]
    code2cls: dict[Incomplete, Incomplete]
    def __init__(attr, py_type, *args, **kwargs) -> None: ...
    @staticmethod
    def create_default_attr(entity) -> None: ...
    def process_entity_inheritance(attr, entity) -> None: ...
    def validate(attr, val, obj=None, entity=None, from_db: bool = False): ...
    def load(attr, obj) -> None: ...
    def __get__(attr, obj, cls=None): ...
    def __set__(attr, obj, new_val) -> None: ...  # type: ignore[override]
    def db_set(attr, obj, new_dbval) -> None: ...  # type: ignore[override]
    def update_reverse(attr, obj, old_val, new_val, undo_funcs) -> None: ...

# A (possibly composite) index/key declaration over several attributes.
class Index:
    __slots__ = ("entity", "attrs", "is_pk", "is_unique")
    entity: Incomplete
    attrs: list[Incomplete]
    is_pk: bool
    is_unique: bool
    def __init__(index, *attrs, **options) -> None: ...

# Declare a non-unique / unique composite index inside an entity body.
def composite_index(*attrs) -> None: ...
def composite_key(*attrs) -> None: ...

# Primary-key attribute; PrimaryKey(a, b, ...) with several attrs declares a
# composite key via __new__, which is why __new__ is overridden here.
class PrimaryKey(Required):
    __slots__: list[str] = []
    def __new__(cls, *args, **kwargs): ...

# Base class for to-many relationship attributes (Set); carries m2m table info
# and caches of the generated SQL for loading/modifying the collection.
class Collection(Attribute):
    __slots__ = (
        "table",
        "wrapper_class",
        "symmetric",
        "reverse_column",
        "reverse_columns",
        "nplus1_threshold",
        "cached_load_sql",
        "cached_add_m2m_sql",
        "cached_remove_m2m_sql",
        "cached_count_sql",
        "cached_empty_sql",
        "reverse_fk_name",
    )
    table: str | list[str] | tuple[str, ...] | None  # m2m table name (possibly schema-qualified)
    wrapper_class: Incomplete
    symmetric: bool          # True for self-referencing symmetric relations
    reverse_column: Incomplete
    reverse_columns: Incomplete
    nplus1_threshold: int    # batch-load threshold to avoid N+1 queries
    cached_load_sql: dict[int, Incomplete]
    cached_add_m2m_sql: tuple[Incomplete, Incomplete] | None
    cached_remove_m2m_sql: tuple[Incomplete, Incomplete] | None
    cached_count_sql: tuple[Incomplete, Incomplete] | None
    cached_empty_sql: tuple[Incomplete, Incomplete, Incomplete] | None
    reverse_fk_name: Incomplete
    def __init__(attr, py_type, *args, **kwargs) -> None: ...
    def load(attr, obj) -> None: ...
    def __get__(attr, obj, cls=None) -> None: ...
    def __set__(attr, obj, val) -> None: ...  # type: ignore[override]
    def __delete__(attr, obj) -> None: ...
    def prepare(attr, obj, val, fromdb: bool = False) -> None: ...
    def set(attr, obj, val, fromdb: bool = False) -> None: ...

# In-memory state of one loaded collection: the items plus pending add/remove
# bookkeeping ("setdata" is Pony's self-name here).
class SetData(set[Incomplete]):
    __slots__ = ("is_fully_loaded", "added", "removed", "absent", "count")
    is_fully_loaded: bool
    added: Incomplete
    removed: Incomplete
    absent: Incomplete
    count: int | None
    def __init__(setdata) -> None: ...

# Build WHERE-clause fragments for batched loading of many objects by key.
def construct_batchload_criteria_list(
    alias, columns, converters, batch_size, row_value_syntax, start: int = 0, from_seeds: bool = True
): ...

# To-many relationship attribute; manages the m2m table and reverse side.
class Set(Collection):
    __slots__: list[str] = []
    def validate(attr, val, obj=None, entity=None, from_db: bool = False): ...
    def prefetch_load_all(attr, objects): ...
    def load(attr, obj, items=None): ...
    def construct_sql_m2m(attr, batch_size: int = 1, items_count: int = 0): ...
    def copy(attr, obj): ...
    def __get__(attr, obj, cls=None): ...
    def __set__(attr, obj, new_items, undo_funcs=None) -> None: ...
    def __delete__(attr, obj) -> None: ...
    # Keep the reverse side consistent when items are added/removed
    # (db_* variants apply values read back from the database).
    def reverse_add(attr, objects, item, undo_funcs) -> None: ...
    def db_reverse_add(attr, objects, item) -> None: ...
    def reverse_remove(attr, objects, item, undo_funcs) -> None: ...
    def db_reverse_remove(attr, objects, item) -> None: ...
    def get_m2m_columns(attr, is_reverse: bool = False): ...
    def remove_m2m(attr, removed) -> None: ...
    def add_m2m(attr, added) -> None: ...
    def drop_table(attr, with_all_data: bool = False) -> None: ...

# Pickle support for SetInstance wrappers.
def unpickle_setwrapper(obj, attrname, items): ...

# Iterator over a SetInstance; "next" kept as an alias for Python 2 compatibility.
class SetIterator:
    def __init__(self, wrapper) -> None: ...
    def __iter__(self): ...
    def next(self): ...
    __next__ = next

# The live wrapper returned by obj.some_set: behaves like a mutable set bound
# to one object/attribute pair ("wrapper" is Pony's self-name here).
class SetInstance:
    __slots__ = ("_obj_", "_attr_", "_attrnames_")
    def __init__(wrapper, obj, attr) -> None: ...
    def __reduce__(wrapper): ...  # pickles via unpickle_setwrapper
    def copy(wrapper): ...
    def __nonzero__(wrapper): ...  # Python 2 truthiness; __len__ covers Python 3
    def is_empty(wrapper): ...
    def __len__(wrapper) -> int: ...
    def count(wrapper): ...
    def __iter__(wrapper): ...
    def __eq__(wrapper, other): ...
    def __ne__(wrapper, other): ...
    def __add__(wrapper, new_items): ...
    def __sub__(wrapper, items): ...
    def __contains__(wrapper, item) -> bool: ...
    def create(wrapper, **kwargs): ...  # create a related object and add it in one step
    def add(wrapper, new_items) -> None: ...
    def __iadd__(wrapper, items): ...
    def remove(wrapper, items) -> None: ...
    def __isub__(wrapper, items): ...
    def clear(wrapper) -> None: ...
    def load(wrapper) -> None: ...
    # Query-building helpers over the collection.
    def select(wrapper, *args, **kwargs): ...
    filter = select
    def limit(wrapper, limit=None, offset=None): ...
    def page(wrapper, pagenum, pagesize: int = 10): ...
    def order_by(wrapper, *args): ...
    def sort_by(wrapper, *args): ...
    def random(wrapper, limit): ...

# Pickle support for Multiset.
def unpickle_multiset(obj, attrnames, items): ...

# Bag-like view produced by traversing collections through multiple hops
# (duplicates preserved; use distinct() to collapse them).
class Multiset:
    __slots__ = ["_obj_", "_attrnames_", "_items_"]
    def __init__(multiset, obj, attrnames, items) -> None: ...
    def __reduce__(multiset): ...
    def distinct(multiset): ...
    def __nonzero__(multiset): ...  # Python 2 truthiness; __len__ covers Python 3
    def __len__(multiset) -> int: ...
    def __iter__(multiset): ...
    def __eq__(multiset, other): ...
    def __ne__(multiset, other): ...
    def __contains__(multiset, item) -> bool: ...

# Guard iterator: iterating an entity class directly is only valid inside a
# query generator, so next() never yields a real value.
class EntityIter:
    entity: Incomplete
    def __init__(self, entity) -> None: ...
    def next(self) -> None: ...
    __next__ = next

# Counters and regexes used while constructing entities and parsing query sources.
entity_id_counter: itertools.count[int]
new_instance_id_counter: itertools.count[int]
select_re: re.Pattern[str]
lambda_re: re.Pattern[str]

# Metaclass of all entities: collects declared attributes, wires relations,
# and exposes class-level query methods (Entity.select, Entity.get, ...).
class EntityMeta(type):
    def __new__(meta, name, bases, cls_dict): ...
    def __init__(entity, name, bases, cls_dict) -> None: ...
    def __iter__(entity): ...            # enables "for x in Entity" inside query generators
    def __getitem__(entity, key): ...    # Entity[pk] lookup; raises ObjectNotFound
    def exists(entity, *args, **kwargs): ...
    def get(entity, *args, **kwargs): ...
    def get_for_update(entity, *args, **kwargs): ...
    def get_by_sql(entity, sql, globals=None, locals=None): ...
    def select(entity, *args, **kwargs): ...
    def select_by_sql(entity, sql, globals=None, locals=None): ...
    def select_random(entity, limit): ...
    def describe(entity) -> str: ...
    def drop_table(entity, with_all_data: bool = False) -> None: ...

# Append WHERE-criteria fragments for the given columns/operations; returns
# the updated parameter count.
def populate_criteria_list(
    criteria_list, columns, converters, operations, params_count: int = 0, table_alias=None, optimistic: bool = False
) -> int: ...

# Object lifecycle status sets used by the session cache state machine.
statuses: set[str]
del_statuses: set[str]
created_or_deleted_statuses: set[str]
saved_statuses: set[str]

# Raise OperationWithDeletedObjectError for obj.
def throw_object_was_deleted(obj: Entity) -> NoReturn: ...
def unpickle_entity(d): ...
def safe_repr(obj: Entity) -> str: ...  # repr that cannot itself raise on broken objects
def make_proxy(obj: Entity) -> EntityProxy: ...

# Session-independent proxy: re-fetches the underlying object by primary key
# in whatever db_session is current when an attribute is accessed.
class EntityProxy:
    def __init__(self, obj: Entity) -> None: ...
    def __getattr__(self, name: str): ...
    def __setattr__(self, name: str, value) -> None: ...
    def __eq__(self, other) -> bool: ...
    def __ne__(self, other) -> bool: ...

# Base class of all mapped objects. Instance state (status, pk value, loaded
# values, read/write bit masks) lives in the slots below; "obj" is Pony's
# self-name for instance methods.
class Entity(metaclass=EntityMeta):
    __slots__ = (
        "_session_cache_",
        "_status_",
        "_pkval_",
        "_newid_",
        "_dbvals_",
        "_vals_",
        "_rbits_",
        "_wbits_",
        "_save_pos_",
        "__weakref__",
    )
    def __reduce__(obj): ...  # pickles via unpickle_entity
    def __init__(obj, *args, **kwargs) -> None: ...
    def get_pk(obj): ...
    # Ordering compares by entity class and primary key.
    def __lt__(entity, other): ...
    def __le__(entity, other): ...
    def __gt__(entity, other): ...
    def __ge__(entity, other): ...
    def load(obj, *attrs) -> None: ...
    def delete(obj) -> None: ...
    def set(obj, **kwargs) -> None: ...  # bulk attribute assignment
    def find_updated_attributes(obj): ...
    def flush(obj) -> None: ...
    # User-overridable lifecycle hooks, called around INSERT/UPDATE/DELETE.
    def before_insert(obj) -> None: ...
    def before_update(obj) -> None: ...
    def before_delete(obj) -> None: ...
    def after_insert(obj) -> None: ...
    def after_update(obj) -> None: ...
    def after_delete(obj) -> None: ...
    def to_dict(
        obj, only=None, exclude=None, with_collections: bool = False, with_lazy: bool = False, related_objects: bool = False
    ): ...
    def to_json(obj, include=(), exclude=(), converter=None, with_schema: bool = True, schema_hash=None): ...

# Parse a lambda/generator source string into its AST expression node.
def string2ast(s: str) -> ast.Expr: ...
# Resolve the caller's globals/locals for translating query expressions.
def get_globals_and_locals(args, kwargs, frame_depth, from_generator=False): ...
def make_query(args, frame_depth, left_join: bool = False) -> Query: ...

# Top-level query entry points; each takes a generator expression or lambda.
def select(*args): ...
def left_join(*args): ...
def get(*args): ...
def exists(*args): ...
def delete(*args): ...

# Wrap a builtin aggregate so it works both on plain iterables and inside queries.
def make_aggrfunc(std_func): ...

# Query-aware replacements for the corresponding builtins (via make_aggrfunc).
count: Incomplete
sum: Incomplete
min: Incomplete
max: Incomplete
avg: Incomplete
group_concat: Incomplete
distinct: Incomplete

# Identity at runtime; inside a query it forces an explicit JOIN for expr.
def JOIN(expr: _T) -> _T: ...
# Mark expr for descending order in order_by()/sort_by().
def desc(expr): ...
# Extract the external values referenced by a query's code object.
def extract_vars(code_key, filter_num, extractors, globals, locals, cells=None): ...
# Pickle support for QueryResult.
def unpickle_query(query_result: _T) -> _T: ...

# Lazy, immutable query object: every refinement method (filter, order_by,
# limit, ...) returns a new Query; SQL runs on iteration or a terminal method.
# "query" is Pony's self-name here.
class Query:
    def __init__(query, code_key, tree, globals, locals, cells=None, left_join: bool = False) -> None: ...
    def __reduce__(query): ...
    def get_sql(query): ...  # the SQL text this query would execute
    def prefetch(query, *args): ...
    def show(query, width=None, stream=None) -> None: ...  # print results as a table
    # Terminal methods executing the query.
    def get(query): ...      # exactly one row or None; raises on multiple rows
    def first(query): ...
    def without_distinct(query): ...
    def distinct(query): ...
    def exists(query): ...
    def delete(query, bulk=None): ...
    def __len__(query) -> int: ...
    def __iter__(query): ...
    # Refinement methods returning a new Query.
    def order_by(query, *args): ...
    def sort_by(query, *args): ...
    def filter(query, *args, **kwargs): ...
    def where(query, *args, **kwargs): ...
    def __getitem__(query, key): ...  # slicing applies LIMIT/OFFSET
    def fetch(query, limit=None, offset=None): ...
    def limit(query, limit=None, offset=None): ...
    def page(query, pagenum, pagesize: int = 10): ...
    # Aggregates over the query result.
    def sum(query, distinct=None): ...
    def avg(query, distinct=None): ...
    def group_concat(query, sep=None, distinct=None): ...
    def min(query): ...
    def max(query): ...
    def count(query, distinct=None): ...
    def for_update(query, nowait: bool = False, skip_locked: bool = False): ...
    def random(query, limit): ...
    def to_json(query, include=(), exclude=(), converter=None, with_schema: bool = True, schema_hash=None): ...

# Iterator over a QueryResult; "next" kept as an alias for Python 2 compatibility.
class QueryResultIterator:
    __slots__ = ("_query_result", "_position")
    def __init__(self, query_result) -> None: ...
    def next(self): ...
    __next__ = next
    def __length_hint__(self) -> int: ...

# Build a stub that raises TypeError explaining QueryResult is not a mutable list.
def make_query_result_method_error_stub(name: str, title: str | None = None) -> Callable[..., NoReturn]: ...

# Immutable list-like container of fetched rows. Supports reading, comparing,
# and re-ordering in place, but every list-mutation method below is replaced
# by an error stub (see make_query_result_method_error_stub).
class QueryResult:
    __slots__ = ("_query", "_limit", "_offset", "_items", "_expr_type", "_col_names")
    def __init__(self, query, limit, offset, lazy) -> None: ...
    def __iter__(self): ...
    def __len__(self) -> int: ...
    def __getitem__(self, key): ...
    def __contains__(self, item) -> bool: ...
    def index(self, item): ...
    def __eq__(self, other): ...
    def __ne__(self, other): ...
    def __lt__(self, other): ...
    def __le__(self, other): ...
    def __gt__(self, other): ...
    def __ge__(self, other): ...
    def __reversed__(self): ...
    # Re-ordering mutates only the order, not the contents.
    def reverse(self) -> None: ...
    def sort(self, *args, **kwargs) -> None: ...
    def shuffle(self) -> None: ...
    def show(self, width=None, stream=None): ...
    def to_json(self, include=(), exclude=(), converter=None, with_schema: bool = True, schema_hash=None): ...
    def __add__(self, other): ...
    def __radd__(self, other): ...
    def to_list(self): ...
    # Mutation methods disabled via error stubs that raise TypeError.
    __setitem__: Incomplete
    __delitem__: Incomplete
    __iadd__: Incomplete
    __imul__: Incomplete
    __mul__: Incomplete
    __rmul__: Incomplete
    append: Incomplete
    clear: Incomplete
    extend: Incomplete
    insert: Incomplete
    pop: Incomplete
    remove: Incomplete

# Truncate s to width characters, padding/eliding for tabular display.
def strcut(s: str, width: int) -> str: ...
# Print a description of an entity class or the attributes of an instance.
def show(entity) -> None: ...

# Functions given special translation treatment inside queries, and functions
# whose results are treated as constants during translation.
special_functions: set[Incomplete]
const_functions: set[type]
