#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
@generated by mypy-protobuf.  Do not edit manually!
isort:skip_file

Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements.  See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License.  You may obtain a copy of the License at

   http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import builtins
import collections.abc
import google.protobuf.descriptor
import google.protobuf.internal.containers
import google.protobuf.internal.enum_type_wrapper
import google.protobuf.message
import pyspark.sql.connect.proto.relations_pb2
import pyspark.sql.connect.proto.types_pb2
import sys
import typing

if sys.version_info >= (3, 10):
    import typing as typing_extensions
else:
    import typing_extensions

DESCRIPTOR: google.protobuf.descriptor.FileDescriptor

class Command(google.protobuf.message.Message):
    """A [[Command]] is an operation that is executed by the server that does not directly consume or
    produce a relational result.
    """

    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    # Proto field numbers for the two alternatives of the `command_type` oneof.
    CREATE_FUNCTION_FIELD_NUMBER: builtins.int
    WRITE_OPERATION_FIELD_NUMBER: builtins.int
    @property
    def create_function(self) -> global___CreateScalarFunction: ...
    @property
    def write_operation(self) -> global___WriteOperation: ...
    def __init__(
        self,
        *,
        create_function: global___CreateScalarFunction | None = ...,
        write_operation: global___WriteOperation | None = ...,
    ) -> None: ...
    # `HasField` accepts either a concrete field name or the oneof group name
    # "command_type" (the latter checks whether any member of the oneof is set).
    # str and bytes forms are both accepted, matching protobuf runtime behavior.
    def HasField(
        self,
        field_name: typing_extensions.Literal[
            "command_type",
            b"command_type",
            "create_function",
            b"create_function",
            "write_operation",
            b"write_operation",
        ],
    ) -> builtins.bool: ...
    def ClearField(
        self,
        field_name: typing_extensions.Literal[
            "command_type",
            b"command_type",
            "create_function",
            b"create_function",
            "write_operation",
            b"write_operation",
        ],
    ) -> None: ...
    # Returns the name of the field currently set in the `command_type` oneof,
    # or None when no member is set.
    def WhichOneof(
        self, oneof_group: typing_extensions.Literal["command_type", b"command_type"]
    ) -> typing_extensions.Literal["create_function", "write_operation"] | None: ...

# Module-level alias emitted by mypy-protobuf so nested/cross-file references
# can name this message unambiguously.
global___Command = Command

class CreateScalarFunction(google.protobuf.message.Message):
    """Simple message that is used to create a scalar function based on the provided function body.

    This message is used to register for example a Python UDF in the session catalog by providing
    the serialized method body.

    TODO(SPARK-40532) It is required to add the interpreter / language version to the command
      parameters.
    """

    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    # mypy-protobuf's standard two-class enum encoding: `_FunctionLanguage`
    # declares the value type (a NewType over int), and the wrapper below
    # exposes the named constants with that type.
    class _FunctionLanguage:
        ValueType = typing.NewType("ValueType", builtins.int)
        V: typing_extensions.TypeAlias = ValueType

    class _FunctionLanguageEnumTypeWrapper(
        google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[
            CreateScalarFunction._FunctionLanguage.ValueType
        ],
        builtins.type,
    ):  # noqa: F821
        DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor
        FUNCTION_LANGUAGE_UNSPECIFIED: CreateScalarFunction._FunctionLanguage.ValueType  # 0
        FUNCTION_LANGUAGE_SQL: CreateScalarFunction._FunctionLanguage.ValueType  # 1
        FUNCTION_LANGUAGE_PYTHON: CreateScalarFunction._FunctionLanguage.ValueType  # 2
        FUNCTION_LANGUAGE_SCALA: CreateScalarFunction._FunctionLanguage.ValueType  # 3

    # Public enum type; the constants are re-exported at message level below
    # for convenience, mirroring the protobuf C++/Python API.
    class FunctionLanguage(_FunctionLanguage, metaclass=_FunctionLanguageEnumTypeWrapper): ...
    FUNCTION_LANGUAGE_UNSPECIFIED: CreateScalarFunction.FunctionLanguage.ValueType  # 0
    FUNCTION_LANGUAGE_SQL: CreateScalarFunction.FunctionLanguage.ValueType  # 1
    FUNCTION_LANGUAGE_PYTHON: CreateScalarFunction.FunctionLanguage.ValueType  # 2
    FUNCTION_LANGUAGE_SCALA: CreateScalarFunction.FunctionLanguage.ValueType  # 3

    # Proto field numbers. `serialized_function` and `literal_string` are the
    # two alternatives of the `function_definition` oneof (see WhichOneof).
    PARTS_FIELD_NUMBER: builtins.int
    LANGUAGE_FIELD_NUMBER: builtins.int
    TEMPORARY_FIELD_NUMBER: builtins.int
    ARGUMENT_TYPES_FIELD_NUMBER: builtins.int
    RETURN_TYPE_FIELD_NUMBER: builtins.int
    SERIALIZED_FUNCTION_FIELD_NUMBER: builtins.int
    LITERAL_STRING_FIELD_NUMBER: builtins.int
    @property
    def parts(
        self,
    ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]:
        """Fully qualified name of the function including the catalog / schema names."""
    language: global___CreateScalarFunction.FunctionLanguage.ValueType
    temporary: builtins.bool
    @property
    def argument_types(
        self,
    ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[
        pyspark.sql.connect.proto.types_pb2.DataType
    ]: ...
    @property
    def return_type(self) -> pyspark.sql.connect.proto.types_pb2.DataType: ...
    serialized_function: builtins.bytes
    """As a raw string serialized:"""
    literal_string: builtins.str
    """As a code literal"""
    def __init__(
        self,
        *,
        parts: collections.abc.Iterable[builtins.str] | None = ...,
        language: global___CreateScalarFunction.FunctionLanguage.ValueType = ...,
        temporary: builtins.bool = ...,
        argument_types: collections.abc.Iterable[pyspark.sql.connect.proto.types_pb2.DataType]
        | None = ...,
        return_type: pyspark.sql.connect.proto.types_pb2.DataType | None = ...,
        serialized_function: builtins.bytes = ...,
        literal_string: builtins.str = ...,
    ) -> None: ...
    # `HasField` is only defined for singular message fields and oneof members
    # (plus the oneof group name itself), hence the narrower Literal set than
    # `ClearField` below.
    def HasField(
        self,
        field_name: typing_extensions.Literal[
            "function_definition",
            b"function_definition",
            "literal_string",
            b"literal_string",
            "return_type",
            b"return_type",
            "serialized_function",
            b"serialized_function",
        ],
    ) -> builtins.bool: ...
    def ClearField(
        self,
        field_name: typing_extensions.Literal[
            "argument_types",
            b"argument_types",
            "function_definition",
            b"function_definition",
            "language",
            b"language",
            "literal_string",
            b"literal_string",
            "parts",
            b"parts",
            "return_type",
            b"return_type",
            "serialized_function",
            b"serialized_function",
            "temporary",
            b"temporary",
        ],
    ) -> None: ...
    # Returns which member of the `function_definition` oneof is set, or None.
    def WhichOneof(
        self, oneof_group: typing_extensions.Literal["function_definition", b"function_definition"]
    ) -> typing_extensions.Literal["serialized_function", "literal_string"] | None: ...

# Module-level alias emitted by mypy-protobuf so nested/cross-file references
# can name this message unambiguously.
global___CreateScalarFunction = CreateScalarFunction

class WriteOperation(google.protobuf.message.Message):
    """As writes are not directly handled during analysis and planning, they are modeled as commands."""

    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    # mypy-protobuf's standard two-class enum encoding: `_SaveMode` declares the
    # value type (a NewType over int), and the wrapper below exposes the named
    # constants with that type.
    class _SaveMode:
        ValueType = typing.NewType("ValueType", builtins.int)
        V: typing_extensions.TypeAlias = ValueType

    class _SaveModeEnumTypeWrapper(
        google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[
            WriteOperation._SaveMode.ValueType
        ],
        builtins.type,
    ):  # noqa: F821
        DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor
        SAVE_MODE_UNSPECIFIED: WriteOperation._SaveMode.ValueType  # 0
        SAVE_MODE_APPEND: WriteOperation._SaveMode.ValueType  # 1
        SAVE_MODE_OVERWRITE: WriteOperation._SaveMode.ValueType  # 2
        SAVE_MODE_ERROR_IF_EXISTS: WriteOperation._SaveMode.ValueType  # 3
        SAVE_MODE_IGNORE: WriteOperation._SaveMode.ValueType  # 4

    # Public enum type; constants re-exported at message level for convenience.
    class SaveMode(_SaveMode, metaclass=_SaveModeEnumTypeWrapper): ...
    SAVE_MODE_UNSPECIFIED: WriteOperation.SaveMode.ValueType  # 0
    SAVE_MODE_APPEND: WriteOperation.SaveMode.ValueType  # 1
    SAVE_MODE_OVERWRITE: WriteOperation.SaveMode.ValueType  # 2
    SAVE_MODE_ERROR_IF_EXISTS: WriteOperation.SaveMode.ValueType  # 3
    SAVE_MODE_IGNORE: WriteOperation.SaveMode.ValueType  # 4

    # Synthetic map-entry message generated for the `options` map field
    # (`map<string, string>` in the proto); not used directly by callers.
    class OptionsEntry(google.protobuf.message.Message):
        DESCRIPTOR: google.protobuf.descriptor.Descriptor

        KEY_FIELD_NUMBER: builtins.int
        VALUE_FIELD_NUMBER: builtins.int
        key: builtins.str
        value: builtins.str
        def __init__(
            self,
            *,
            key: builtins.str = ...,
            value: builtins.str = ...,
        ) -> None: ...
        def ClearField(
            self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]
        ) -> None: ...

    # Nested message carrying the bucketing spec for the `bucket_by` field.
    class BucketBy(google.protobuf.message.Message):
        DESCRIPTOR: google.protobuf.descriptor.Descriptor

        BUCKET_COLUMN_NAMES_FIELD_NUMBER: builtins.int
        NUM_BUCKETS_FIELD_NUMBER: builtins.int
        @property
        def bucket_column_names(
            self,
        ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: ...
        num_buckets: builtins.int
        def __init__(
            self,
            *,
            bucket_column_names: collections.abc.Iterable[builtins.str] | None = ...,
            num_buckets: builtins.int = ...,
        ) -> None: ...
        def ClearField(
            self,
            field_name: typing_extensions.Literal[
                "bucket_column_names", b"bucket_column_names", "num_buckets", b"num_buckets"
            ],
        ) -> None: ...

    # Proto field numbers. `path` and `table_name` are the two alternatives of
    # the `save_type` oneof (see WhichOneof at the bottom).
    INPUT_FIELD_NUMBER: builtins.int
    SOURCE_FIELD_NUMBER: builtins.int
    PATH_FIELD_NUMBER: builtins.int
    TABLE_NAME_FIELD_NUMBER: builtins.int
    MODE_FIELD_NUMBER: builtins.int
    SORT_COLUMN_NAMES_FIELD_NUMBER: builtins.int
    PARTITIONING_COLUMNS_FIELD_NUMBER: builtins.int
    BUCKET_BY_FIELD_NUMBER: builtins.int
    OPTIONS_FIELD_NUMBER: builtins.int
    @property
    def input(self) -> pyspark.sql.connect.proto.relations_pb2.Relation:
        """The output of the `input` relation will be persisted according to the options."""
    source: builtins.str
    """Format value according to the Spark documentation. Examples are: text, parquet, delta."""
    path: builtins.str
    table_name: builtins.str
    mode: global___WriteOperation.SaveMode.ValueType
    @property
    def sort_column_names(
        self,
    ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]:
        """List of columns to sort the output by."""
    @property
    def partitioning_columns(
        self,
    ) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]:
        """List of columns for partitioning."""
    @property
    def bucket_by(self) -> global___WriteOperation.BucketBy:
        """Optional bucketing specification. Bucketing must set the number of buckets and the columns
        to bucket by.
        """
    @property
    def options(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.str]:
        """Optional list of configuration options."""
    def __init__(
        self,
        *,
        input: pyspark.sql.connect.proto.relations_pb2.Relation | None = ...,
        source: builtins.str = ...,
        path: builtins.str = ...,
        table_name: builtins.str = ...,
        mode: global___WriteOperation.SaveMode.ValueType = ...,
        sort_column_names: collections.abc.Iterable[builtins.str] | None = ...,
        partitioning_columns: collections.abc.Iterable[builtins.str] | None = ...,
        bucket_by: global___WriteOperation.BucketBy | None = ...,
        options: collections.abc.Mapping[builtins.str, builtins.str] | None = ...,
    ) -> None: ...
    # `HasField` is only defined for singular message fields and oneof members
    # (plus the oneof group name itself), hence the narrower Literal set than
    # `ClearField` below.
    def HasField(
        self,
        field_name: typing_extensions.Literal[
            "bucket_by",
            b"bucket_by",
            "input",
            b"input",
            "path",
            b"path",
            "save_type",
            b"save_type",
            "table_name",
            b"table_name",
        ],
    ) -> builtins.bool: ...
    def ClearField(
        self,
        field_name: typing_extensions.Literal[
            "bucket_by",
            b"bucket_by",
            "input",
            b"input",
            "mode",
            b"mode",
            "options",
            b"options",
            "partitioning_columns",
            b"partitioning_columns",
            "path",
            b"path",
            "save_type",
            b"save_type",
            "sort_column_names",
            b"sort_column_names",
            "source",
            b"source",
            "table_name",
            b"table_name",
        ],
    ) -> None: ...
    # Returns which member of the `save_type` oneof is set, or None.
    def WhichOneof(
        self, oneof_group: typing_extensions.Literal["save_type", b"save_type"]
    ) -> typing_extensions.Literal["path", "table_name"] | None: ...

# Module-level alias emitted by mypy-protobuf so nested/cross-file references
# can name this message unambiguously.
global___WriteOperation = WriteOperation
