from _typeshed import Incomplete

from hvac.api.vault_api_base import VaultApiBase

# Default mount point for the Transform secrets engine. NOTE(review): the
# method signatures below hardcode the literal "transform" as their default
# rather than referencing this constant, so the two must be kept in sync.
DEFAULT_MOUNT_POINT: str

class Transform(VaultApiBase):
    """Stub declarations for the hvac Transform secrets-engine API class.

    Every method body here is ``...`` — these are type-stub signatures only;
    the hvac library provides the implementations. Most parameters and all
    return values are left as ``Incomplete``/unannotated; the implementations
    presumably return Vault HTTP response data — TODO confirm against hvac.

    All methods accept ``mount_point`` with the literal default ``"transform"``
    (matching the module-level ``DEFAULT_MOUNT_POINT`` declared above).
    """
    # --- Role CRUD ---------------------------------------------------------
    def create_or_update_role(self, name, transformations, mount_point: str = "transform"): ...
    def read_role(self, name, mount_point: str = "transform"): ...
    def list_roles(self, mount_point: str = "transform"): ...
    def delete_role(self, name, mount_point: str = "transform"): ...
    # --- Transformation CRUD -----------------------------------------------
    # Generic creator taking an explicit transform_type, plus three
    # type-specific variants (fpe / masking / tokenization) below.
    def create_or_update_transformation(
        self,
        name,
        transform_type,
        template,
        tweak_source: str = "supplied",
        masking_character: str = "*",
        allowed_roles: Incomplete | None = None,
        mount_point: str = "transform",
    ): ...
    def create_or_update_fpe_transformation(
        self,
        name,
        template,
        tweak_source: str = "supplied",
        allowed_roles: Incomplete | None = None,
        mount_point: str = "transform",
    ): ...
    def create_or_update_masking_transformation(
        self,
        name,
        template,
        masking_character: str = "*",
        allowed_roles: Incomplete | None = None,
        mount_point: str = "transform",
    ): ...
    def create_or_update_tokenization_transformation(
        self,
        name,
        max_ttl: int = 0,
        mapping_mode: str = "default",
        allowed_roles: Incomplete | None = None,
        stores: Incomplete | None = None,
        mount_point: str = "transform",
    ): ...
    def read_transformation(self, name, mount_point: str = "transform"): ...
    def list_transformations(self, mount_point: str = "transform"): ...
    def delete_transformation(self, name, mount_point: str = "transform"): ...
    # --- Template CRUD -----------------------------------------------------
    def create_or_update_template(self, name, template_type, pattern, alphabet, mount_point: str = "transform"): ...
    def read_template(self, name, mount_point: str = "transform"): ...
    def list_templates(self, mount_point: str = "transform"): ...
    def delete_template(self, name, mount_point: str = "transform"): ...
    # --- Alphabet CRUD -----------------------------------------------------
    def create_or_update_alphabet(self, name, alphabet, mount_point: str = "transform"): ...
    def read_alphabet(self, name, mount_point: str = "transform"): ...
    def list_alphabets(self, mount_point: str = "transform"): ...
    def delete_alphabet(self, name, mount_point: str = "transform"): ...
    # --- Tokenization store configuration ----------------------------------
    def create_or_update_tokenization_store(
        self,
        name,
        driver,
        connection_string,
        username: Incomplete | None = None,
        password: Incomplete | None = None,
        # NOTE(review): parameter name shadows the ``type`` builtin, but it
        # mirrors the Vault API field name; renaming would break callers.
        type: str = "sql",
        supported_transformations: Incomplete | None = None,
        schema: str = "public",
        max_open_connections: int = 4,
        max_idle_connections: int = 4,
        max_connection_lifetime: int = 0,
        mount_point: str = "transform",
    ): ...
    # --- Encode / decode ---------------------------------------------------
    # ``value``/``tweak`` for a single item or ``batch_input`` for many;
    # both default to None, so the choice is made per call.
    def encode(
        self,
        role_name,
        value: Incomplete | None = None,
        transformation: Incomplete | None = None,
        tweak: Incomplete | None = None,
        batch_input: Incomplete | None = None,
        mount_point: str = "transform",
    ): ...
    def decode(
        self,
        role_name,
        value: Incomplete | None = None,
        transformation: Incomplete | None = None,
        tweak: Incomplete | None = None,
        batch_input: Incomplete | None = None,
    mount_point: str = "transform",
    ): ...
    # --- Token inspection --------------------------------------------------
    # Unlike encode/decode, these require ``value`` and ``transformation``
    # positionally; only ``batch_input`` is optional.
    def validate_token(
        self, role_name, value, transformation, batch_input: Incomplete | None = None, mount_point: str = "transform"
    ): ...
    def check_tokenization(
        self, role_name, value, transformation, batch_input: Incomplete | None = None, mount_point: str = "transform"
    ): ...
    def retrieve_token_metadata(
        self, role_name, value, transformation, batch_input: Incomplete | None = None, mount_point: str = "transform"
    ): ...
    # --- Tokenization state snapshot / restore / export --------------------
    def snapshot_tokenization_state(self, name, limit: int = 1000, continuation: str = "", mount_point: str = "transform"): ...
    def restore_tokenization_state(self, name, values, mount_point: str = "transform"): ...
    def export_decoded_tokenization_state(
        self, name, limit: int = 1000, continuation: str = "", mount_point: str = "transform"
    ): ...
    # --- Tokenization key management ---------------------------------------
    def rotate_tokenization_key(self, transform_name, mount_point: str = "transform"): ...
    def update_tokenization_key_config(self, transform_name, min_decryption_version, mount_point: str = "transform"): ...
    def list_tokenization_key_configuration(self, mount_point: str = "transform"): ...
    def read_tokenization_key_configuration(self, transform_name, mount_point: str = "transform"): ...
    def trim_tokenization_key_version(self, transform_name, min_available_version, mount_point: str = "transform"): ...
