from _typeshed import Incomplete

# Redis command-name string constants for the probabilistic data
# structures exposed by the classes below (e.g. "BF.RESERVE").
# Values are assigned in the runtime module; this stub only declares
# their types.
# BF.* -- Bloom Filter commands
BF_RESERVE: str
BF_ADD: str
BF_MADD: str
BF_INSERT: str
BF_EXISTS: str
BF_MEXISTS: str
BF_SCANDUMP: str
BF_LOADCHUNK: str
BF_INFO: str
# CF.* -- Cuckoo Filter commands
CF_RESERVE: str
CF_ADD: str
CF_ADDNX: str
CF_INSERT: str
CF_INSERTNX: str
CF_EXISTS: str
CF_DEL: str
CF_COUNT: str
CF_SCANDUMP: str
CF_LOADCHUNK: str
CF_INFO: str
# CMS.* -- Count-Min Sketch commands
CMS_INITBYDIM: str
CMS_INITBYPROB: str
CMS_INCRBY: str
CMS_QUERY: str
CMS_MERGE: str
CMS_INFO: str
# TOPK.* -- Top-K commands
TOPK_RESERVE: str
TOPK_ADD: str
TOPK_INCRBY: str
TOPK_QUERY: str
TOPK_COUNT: str
TOPK_LIST: str
TOPK_INFO: str
# TDIGEST.* -- t-digest (quantile sketch) commands
TDIGEST_CREATE: str
TDIGEST_RESET: str
TDIGEST_ADD: str
TDIGEST_MERGE: str
TDIGEST_CDF: str
TDIGEST_QUANTILE: str
TDIGEST_MIN: str
TDIGEST_MAX: str
TDIGEST_INFO: str

class BFCommands:
    """Bloom Filter command mixin (stub).

    Declarations only -- method names mirror the module-level ``BF_*``
    command-name constants (presumably each method issues the
    corresponding ``BF.*`` Redis command; confirm against the runtime
    implementation). Parameter names use camelCase (``errorRate``,
    ``noScale``) to match the runtime API and must not be renamed.
    """
    def create(self, key, errorRate, capacity, expansion: Incomplete | None = None, noScale: Incomplete | None = None): ...
    def add(self, key, item): ...
    def madd(self, key, *items): ...
    def insert(
        self,
        key,
        items,
        capacity: Incomplete | None = None,
        error: Incomplete | None = None,
        noCreate: Incomplete | None = None,
        expansion: Incomplete | None = None,
        noScale: Incomplete | None = None,
    ): ...
    def exists(self, key, item): ...
    def mexists(self, key, *items): ...
    # NOTE(review): `iter` shadows the builtin, but it is part of the
    # declared runtime signature, so it stays as-is in the stub.
    def scandump(self, key, iter): ...
    def loadchunk(self, key, iter, data): ...
    def info(self, key): ...

class CFCommands:
    """Cuckoo Filter command mixin (stub).

    Declarations only -- method names mirror the module-level ``CF_*``
    command-name constants (presumably each method issues the
    corresponding ``CF.*`` Redis command; confirm against the runtime
    implementation).
    """
    def create(
        self,
        key,
        capacity,
        expansion: Incomplete | None = None,
        bucket_size: Incomplete | None = None,
        max_iterations: Incomplete | None = None,
    ): ...
    def add(self, key, item): ...
    def addnx(self, key, item): ...
    def insert(self, key, items, capacity: Incomplete | None = None, nocreate: Incomplete | None = None): ...
    def insertnx(self, key, items, capacity: Incomplete | None = None, nocreate: Incomplete | None = None): ...
    def exists(self, key, item): ...
    # Named `delete` rather than `del` (reserved word); presumably maps
    # to the CF_DEL command -- TODO confirm against the implementation.
    def delete(self, key, item): ...
    def count(self, key, item): ...
    # NOTE(review): `iter` shadows the builtin, but it is part of the
    # declared runtime signature, so it stays as-is in the stub.
    def scandump(self, key, iter): ...
    def loadchunk(self, key, iter, data): ...
    def info(self, key): ...

class TOPKCommands:
    """Top-K command mixin (stub).

    Declarations only -- method names mirror the module-level ``TOPK_*``
    command-name constants (presumably each method issues the
    corresponding ``TOPK.*`` Redis command; confirm against the runtime
    implementation).
    """
    def reserve(self, key, k, width, depth, decay): ...
    def add(self, key, *items): ...
    def incrby(self, key, items, increments): ...
    def query(self, key, *items): ...
    def count(self, key, *items): ...
    # NOTE(review): `list` shadows the builtin, but it is part of the
    # declared runtime signature, so it stays as-is in the stub.
    def list(self, key, withcount: bool = False): ...
    def info(self, key): ...

class TDigestCommands:
    """t-digest (quantile sketch) command mixin (stub).

    Declarations only -- method names mirror the module-level
    ``TDIGEST_*`` command-name constants (presumably each method issues
    the corresponding ``TDIGEST.*`` Redis command; confirm against the
    runtime implementation).
    """
    def create(self, key, compression: int = 100): ...
    def reset(self, key): ...
    def add(self, key, values): ...
    def merge(self, destination_key, num_keys, *keys, compression: int | None = None, override: bool = False): ...
    # NOTE(review): `min`/`max` shadow builtins, but they are part of
    # the declared runtime signature, so they stay as-is in the stub.
    def min(self, key): ...
    def max(self, key): ...
    def quantile(self, key, quantile, *quantiles): ...
    def cdf(self, key, value, *values): ...
    def info(self, key): ...

class CMSCommands:
    """Count-Min Sketch command mixin (stub).

    Declarations only -- method names mirror the module-level ``CMS_*``
    command-name constants (presumably each method issues the
    corresponding ``CMS.*`` Redis command; confirm against the runtime
    implementation).
    """
    def initbydim(self, key, width, depth): ...
    def initbyprob(self, key, error, probability): ...
    def incrby(self, key, items, increments): ...
    def query(self, key, *items): ...
    # NOTE(review): the mutable default `weights=[]` would be a bug in
    # runtime code, but stub defaults are never evaluated; it documents
    # the runtime signature and is kept as-is.
    def merge(self, destKey, numKeys, srcKeys, weights=[]): ...
    def info(self, key): ...
