#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import builtins
import collections.abc
import google.protobuf.any_pb2
import google.protobuf.descriptor
import google.protobuf.internal.containers
import google.protobuf.internal.enum_type_wrapper
import google.protobuf.message
import pyspark.sql.connect.proto.common_pb2
import pyspark.sql.connect.proto.types_pb2
import sys
import typing
if sys.version_info >= (3, 10):
import typing as typing_extensions
else:
import typing_extensions
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
class Expression(google.protobuf.message.Message):
"""Expression used to refer to fields, functions and similar. This can be used everywhere
expressions in SQL appear.
"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
class Window(google.protobuf.message.Message):
"""Expression for the OVER clause or WINDOW clause."""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
class WindowFrame(google.protobuf.message.Message):
"""The window frame"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
class _FrameType:
ValueType = typing.NewType("ValueType", builtins.int)
V: typing_extensions.TypeAlias = ValueType
class _FrameTypeEnumTypeWrapper(
google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[
Expression.Window.WindowFrame._FrameType.ValueType
],
builtins.type,
): # noqa: F821
DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor
FRAME_TYPE_UNDEFINED: Expression.Window.WindowFrame._FrameType.ValueType # 0
FRAME_TYPE_ROW: Expression.Window.WindowFrame._FrameType.ValueType # 1
"""RowFrame treats rows in a partition individually."""
FRAME_TYPE_RANGE: Expression.Window.WindowFrame._FrameType.ValueType # 2
"""RangeFrame treats rows in a partition as groups of peers.
All rows having the same 'ORDER BY' ordering are considered as peers.
"""
class FrameType(_FrameType, metaclass=_FrameTypeEnumTypeWrapper): ...
FRAME_TYPE_UNDEFINED: Expression.Window.WindowFrame.FrameType.ValueType # 0
FRAME_TYPE_ROW: Expression.Window.WindowFrame.FrameType.ValueType # 1
"""RowFrame treats rows in a partition individually."""
FRAME_TYPE_RANGE: Expression.Window.WindowFrame.FrameType.ValueType # 2
"""RangeFrame treats rows in a partition as groups of peers.
All rows having the same 'ORDER BY' ordering are considered as peers.
"""
class FrameBoundary(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
CURRENT_ROW_FIELD_NUMBER: builtins.int
UNBOUNDED_FIELD_NUMBER: builtins.int
VALUE_FIELD_NUMBER: builtins.int
current_row: builtins.bool
"""CURRENT ROW boundary"""
unbounded: builtins.bool
"""UNBOUNDED boundary.
For lower bound, it will be converted to 'UnboundedPreceding'.
For upper bound, it will be converted to 'UnboundedFollowing'.
"""
@property
def value(self) -> global___Expression:
"""This is an expression for future proofing. We are expecting literals on the server side."""
def __init__(
self,
*,
current_row: builtins.bool = ...,
unbounded: builtins.bool = ...,
value: global___Expression | None = ...,
) -> None: ...
def HasField(
self,
field_name: typing_extensions.Literal[
"boundary",
b"boundary",
"current_row",
b"current_row",
"unbounded",
b"unbounded",
"value",
b"value",
],
) -> builtins.bool: ...
def ClearField(
self,
field_name: typing_extensions.Literal[
"boundary",
b"boundary",
"current_row",
b"current_row",
"unbounded",
b"unbounded",
"value",
b"value",
],
) -> None: ...
def WhichOneof(
self, oneof_group: typing_extensions.Literal["boundary", b"boundary"]
) -> typing_extensions.Literal["current_row", "unbounded", "value"] | None: ...
FRAME_TYPE_FIELD_NUMBER: builtins.int
LOWER_FIELD_NUMBER: builtins.int
UPPER_FIELD_NUMBER: builtins.int
frame_type: global___Expression.Window.WindowFrame.FrameType.ValueType
"""(Required) The type of the frame."""
@property
def lower(self) -> global___Expression.Window.WindowFrame.FrameBoundary:
"""(Required) The lower bound of the frame."""
@property
def upper(self) -> global___Expression.Window.WindowFrame.FrameBoundary:
"""(Required) The upper bound of the frame."""
def __init__(
self,
*,
frame_type: global___Expression.Window.WindowFrame.FrameType.ValueType = ...,
lower: global___Expression.Window.WindowFrame.FrameBoundary | None = ...,
upper: global___Expression.Window.WindowFrame.FrameBoundary | None = ...,
) -> None: ...
def HasField(
self, field_name: typing_extensions.Literal["lower", b"lower", "upper", b"upper"]
) -> builtins.bool: ...
def ClearField(
self,
field_name: typing_extensions.Literal[
"frame_type", b"frame_type", "lower", b"lower", "upper", b"upper"
],
) -> None: ...
WINDOW_FUNCTION_FIELD_NUMBER: builtins.int
PARTITION_SPEC_FIELD_NUMBER: builtins.int
ORDER_SPEC_FIELD_NUMBER: builtins.int
FRAME_SPEC_FIELD_NUMBER: builtins.int
@property
def window_function(self) -> global___Expression:
"""(Required) The window function."""
@property
def partition_spec(
self,
) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[
global___Expression
]:
"""(Optional) The way that input rows are partitioned."""
@property
def order_spec(
self,
) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[
global___Expression.SortOrder
]:
"""(Optional) Ordering of rows in a partition."""
@property
def frame_spec(self) -> global___Expression.Window.WindowFrame:
"""(Optional) Window frame in a partition.
If not set, it will be treated as 'UnspecifiedFrame'.
"""
def __init__(
self,
*,
window_function: global___Expression | None = ...,
partition_spec: collections.abc.Iterable[global___Expression] | None = ...,
order_spec: collections.abc.Iterable[global___Expression.SortOrder] | None = ...,
frame_spec: global___Expression.Window.WindowFrame | None = ...,
) -> None: ...
def HasField(
self,
field_name: typing_extensions.Literal[
"frame_spec", b"frame_spec", "window_function", b"window_function"
],
) -> builtins.bool: ...
def ClearField(
self,
field_name: typing_extensions.Literal[
"frame_spec",
b"frame_spec",
"order_spec",
b"order_spec",
"partition_spec",
b"partition_spec",
"window_function",
b"window_function",
],
) -> None: ...
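# A minimal sketch, kept in comments so this generated stub stays valid, of
# how a client might assemble a ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT
# ROW frame with these messages (the module alias and usage are assumptions):
#
#   from pyspark.sql.connect.proto import expressions_pb2 as pb
#
#   frame = pb.Expression.Window.WindowFrame(
#       frame_type=pb.Expression.Window.WindowFrame.FRAME_TYPE_ROW,
#       lower=pb.Expression.Window.WindowFrame.FrameBoundary(unbounded=True),
#       upper=pb.Expression.Window.WindowFrame.FrameBoundary(current_row=True),
#   )
#   # The three FrameBoundary fields form a 'boundary' oneof; only one is set.
#   assert frame.lower.WhichOneof("boundary") == "unbounded"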
class SortOrder(google.protobuf.message.Message):
"""SortOrder is used to specify the data ordering, it is normally used in Sort and Window.
It is an unevaluable expression and cannot be evaluated, so can not be used in Projection.
"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
class _SortDirection:
ValueType = typing.NewType("ValueType", builtins.int)
V: typing_extensions.TypeAlias = ValueType
class _SortDirectionEnumTypeWrapper(
google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[
Expression.SortOrder._SortDirection.ValueType
],
builtins.type,
): # noqa: F821
DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor
SORT_DIRECTION_UNSPECIFIED: Expression.SortOrder._SortDirection.ValueType # 0
SORT_DIRECTION_ASCENDING: Expression.SortOrder._SortDirection.ValueType # 1
SORT_DIRECTION_DESCENDING: Expression.SortOrder._SortDirection.ValueType # 2
class SortDirection(_SortDirection, metaclass=_SortDirectionEnumTypeWrapper): ...
SORT_DIRECTION_UNSPECIFIED: Expression.SortOrder.SortDirection.ValueType # 0
SORT_DIRECTION_ASCENDING: Expression.SortOrder.SortDirection.ValueType # 1
SORT_DIRECTION_DESCENDING: Expression.SortOrder.SortDirection.ValueType # 2
class _NullOrdering:
ValueType = typing.NewType("ValueType", builtins.int)
V: typing_extensions.TypeAlias = ValueType
class _NullOrderingEnumTypeWrapper(
google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[
Expression.SortOrder._NullOrdering.ValueType
],
builtins.type,
): # noqa: F821
DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor
SORT_NULLS_UNSPECIFIED: Expression.SortOrder._NullOrdering.ValueType # 0
SORT_NULLS_FIRST: Expression.SortOrder._NullOrdering.ValueType # 1
SORT_NULLS_LAST: Expression.SortOrder._NullOrdering.ValueType # 2
class NullOrdering(_NullOrdering, metaclass=_NullOrderingEnumTypeWrapper): ...
SORT_NULLS_UNSPECIFIED: Expression.SortOrder.NullOrdering.ValueType # 0
SORT_NULLS_FIRST: Expression.SortOrder.NullOrdering.ValueType # 1
SORT_NULLS_LAST: Expression.SortOrder.NullOrdering.ValueType # 2
CHILD_FIELD_NUMBER: builtins.int
DIRECTION_FIELD_NUMBER: builtins.int
NULL_ORDERING_FIELD_NUMBER: builtins.int
@property
def child(self) -> global___Expression:
"""(Required) The expression to be sorted."""
direction: global___Expression.SortOrder.SortDirection.ValueType
"""(Required) The sort direction, should be ASCENDING or DESCENDING."""
null_ordering: global___Expression.SortOrder.NullOrdering.ValueType
"""(Required) How to deal with NULLs, should be NULLS_FIRST or NULLS_LAST."""
def __init__(
self,
*,
child: global___Expression | None = ...,
direction: global___Expression.SortOrder.SortDirection.ValueType = ...,
null_ordering: global___Expression.SortOrder.NullOrdering.ValueType = ...,
) -> None: ...
def HasField(
self, field_name: typing_extensions.Literal["child", b"child"]
) -> builtins.bool: ...
def ClearField(
self,
field_name: typing_extensions.Literal[
"child", b"child", "direction", b"direction", "null_ordering", b"null_ordering"
],
) -> None: ...
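# A minimal sketch (comments only): a descending, NULLS LAST ordering over an
# unresolved column; the column name "ts" is illustrative:
#
#   from pyspark.sql.connect.proto import expressions_pb2 as pb
#
#   order = pb.Expression.SortOrder(
#       child=pb.Expression(
#           unresolved_attribute=pb.Expression.UnresolvedAttribute(
#               unparsed_identifier="ts"
#           )
#       ),
#       direction=pb.Expression.SortOrder.SORT_DIRECTION_DESCENDING,
#       null_ordering=pb.Expression.SortOrder.SORT_NULLS_LAST,
#   )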
class Cast(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
class _EvalMode:
ValueType = typing.NewType("ValueType", builtins.int)
V: typing_extensions.TypeAlias = ValueType
class _EvalModeEnumTypeWrapper(
google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[
Expression.Cast._EvalMode.ValueType
],
builtins.type,
): # noqa: F821
DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor
EVAL_MODE_UNSPECIFIED: Expression.Cast._EvalMode.ValueType # 0
EVAL_MODE_LEGACY: Expression.Cast._EvalMode.ValueType # 1
EVAL_MODE_ANSI: Expression.Cast._EvalMode.ValueType # 2
EVAL_MODE_TRY: Expression.Cast._EvalMode.ValueType # 3
class EvalMode(_EvalMode, metaclass=_EvalModeEnumTypeWrapper): ...
EVAL_MODE_UNSPECIFIED: Expression.Cast.EvalMode.ValueType # 0
EVAL_MODE_LEGACY: Expression.Cast.EvalMode.ValueType # 1
EVAL_MODE_ANSI: Expression.Cast.EvalMode.ValueType # 2
EVAL_MODE_TRY: Expression.Cast.EvalMode.ValueType # 3
EXPR_FIELD_NUMBER: builtins.int
TYPE_FIELD_NUMBER: builtins.int
TYPE_STR_FIELD_NUMBER: builtins.int
EVAL_MODE_FIELD_NUMBER: builtins.int
@property
def expr(self) -> global___Expression:
"""(Required) the expression to be casted."""
@property
def type(self) -> pyspark.sql.connect.proto.types_pb2.DataType: ...
type_str: builtins.str
"""If this is set, Server will use Catalyst parser to parse this string to DataType."""
eval_mode: global___Expression.Cast.EvalMode.ValueType
"""(Optional) The expression evaluation mode."""
def __init__(
self,
*,
expr: global___Expression | None = ...,
type: pyspark.sql.connect.proto.types_pb2.DataType | None = ...,
type_str: builtins.str = ...,
eval_mode: global___Expression.Cast.EvalMode.ValueType = ...,
) -> None: ...
def HasField(
self,
field_name: typing_extensions.Literal[
"cast_to_type",
b"cast_to_type",
"expr",
b"expr",
"type",
b"type",
"type_str",
b"type_str",
],
) -> builtins.bool: ...
def ClearField(
self,
field_name: typing_extensions.Literal[
"cast_to_type",
b"cast_to_type",
"eval_mode",
b"eval_mode",
"expr",
b"expr",
"type",
b"type",
"type_str",
b"type_str",
],
) -> None: ...
def WhichOneof(
self, oneof_group: typing_extensions.Literal["cast_to_type", b"cast_to_type"]
) -> typing_extensions.Literal["type", "type_str"] | None: ...
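# A minimal sketch (comments only): 'type' and 'type_str' form the
# 'cast_to_type' oneof, so setting one leaves the other unset; the column
# name "age" is illustrative:
#
#   from pyspark.sql.connect.proto import expressions_pb2 as pb
#
#   cast = pb.Expression.Cast(
#       expr=pb.Expression(
#           unresolved_attribute=pb.Expression.UnresolvedAttribute(
#               unparsed_identifier="age"
#           )
#       ),
#       type_str="bigint",  # parsed by the Catalyst parser on the server
#       eval_mode=pb.Expression.Cast.EVAL_MODE_TRY,
#   )
#   assert cast.WhichOneof("cast_to_type") == "type_str"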
class Literal(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
class Decimal(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
VALUE_FIELD_NUMBER: builtins.int
PRECISION_FIELD_NUMBER: builtins.int
SCALE_FIELD_NUMBER: builtins.int
value: builtins.str
"""the string representation."""
precision: builtins.int
"""The maximum number of digits allowed in the value.
The maximum precision is 38.
"""
scale: builtins.int
"""declared scale of decimal literal"""
def __init__(
self,
*,
value: builtins.str = ...,
precision: builtins.int | None = ...,
scale: builtins.int | None = ...,
) -> None: ...
def HasField(
self,
field_name: typing_extensions.Literal[
"_precision",
b"_precision",
"_scale",
b"_scale",
"precision",
b"precision",
"scale",
b"scale",
],
) -> builtins.bool: ...
def ClearField(
self,
field_name: typing_extensions.Literal[
"_precision",
b"_precision",
"_scale",
b"_scale",
"precision",
b"precision",
"scale",
b"scale",
"value",
b"value",
],
) -> None: ...
@typing.overload
def WhichOneof(
self, oneof_group: typing_extensions.Literal["_precision", b"_precision"]
) -> typing_extensions.Literal["precision"] | None: ...
@typing.overload
def WhichOneof(
self, oneof_group: typing_extensions.Literal["_scale", b"_scale"]
) -> typing_extensions.Literal["scale"] | None: ...
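# A minimal sketch (comments only): 'precision' and 'scale' are proto3
# optional fields, so presence is tracked and checkable with HasField:
#
#   from pyspark.sql.connect.proto import expressions_pb2 as pb
#
#   dec = pb.Expression.Literal.Decimal(value="3.14", precision=10, scale=2)
#   assert dec.HasField("precision") and dec.HasField("scale")
#   dec.ClearField("scale")
#   assert not dec.HasField("scale")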
class CalendarInterval(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
MONTHS_FIELD_NUMBER: builtins.int
DAYS_FIELD_NUMBER: builtins.int
MICROSECONDS_FIELD_NUMBER: builtins.int
months: builtins.int
days: builtins.int
microseconds: builtins.int
def __init__(
self,
*,
months: builtins.int = ...,
days: builtins.int = ...,
microseconds: builtins.int = ...,
) -> None: ...
def ClearField(
self,
field_name: typing_extensions.Literal[
"days", b"days", "microseconds", b"microseconds", "months", b"months"
],
) -> None: ...
class Array(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
ELEMENT_TYPE_FIELD_NUMBER: builtins.int
ELEMENTS_FIELD_NUMBER: builtins.int
@property
def element_type(self) -> pyspark.sql.connect.proto.types_pb2.DataType: ...
@property
def elements(
self,
) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[
global___Expression.Literal
]: ...
def __init__(
self,
*,
element_type: pyspark.sql.connect.proto.types_pb2.DataType | None = ...,
elements: collections.abc.Iterable[global___Expression.Literal] | None = ...,
) -> None: ...
def HasField(
self, field_name: typing_extensions.Literal["element_type", b"element_type"]
) -> builtins.bool: ...
def ClearField(
self,
field_name: typing_extensions.Literal[
"element_type", b"element_type", "elements", b"elements"
],
) -> None: ...
class Map(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
KEY_TYPE_FIELD_NUMBER: builtins.int
VALUE_TYPE_FIELD_NUMBER: builtins.int
KEYS_FIELD_NUMBER: builtins.int
VALUES_FIELD_NUMBER: builtins.int
@property
def key_type(self) -> pyspark.sql.connect.proto.types_pb2.DataType: ...
@property
def value_type(self) -> pyspark.sql.connect.proto.types_pb2.DataType: ...
@property
def keys(
self,
) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[
global___Expression.Literal
]: ...
@property
def values(
self,
) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[
global___Expression.Literal
]: ...
def __init__(
self,
*,
key_type: pyspark.sql.connect.proto.types_pb2.DataType | None = ...,
value_type: pyspark.sql.connect.proto.types_pb2.DataType | None = ...,
keys: collections.abc.Iterable[global___Expression.Literal] | None = ...,
values: collections.abc.Iterable[global___Expression.Literal] | None = ...,
) -> None: ...
def HasField(
self,
field_name: typing_extensions.Literal[
"key_type", b"key_type", "value_type", b"value_type"
],
) -> builtins.bool: ...
def ClearField(
self,
field_name: typing_extensions.Literal[
"key_type",
b"key_type",
"keys",
b"keys",
"value_type",
b"value_type",
"values",
b"values",
],
) -> None: ...
class Struct(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
STRUCT_TYPE_FIELD_NUMBER: builtins.int
ELEMENTS_FIELD_NUMBER: builtins.int
@property
def struct_type(self) -> pyspark.sql.connect.proto.types_pb2.DataType: ...
@property
def elements(
self,
) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[
global___Expression.Literal
]: ...
def __init__(
self,
*,
struct_type: pyspark.sql.connect.proto.types_pb2.DataType | None = ...,
elements: collections.abc.Iterable[global___Expression.Literal] | None = ...,
) -> None: ...
def HasField(
self, field_name: typing_extensions.Literal["struct_type", b"struct_type"]
) -> builtins.bool: ...
def ClearField(
self,
field_name: typing_extensions.Literal[
"elements", b"elements", "struct_type", b"struct_type"
],
) -> None: ...
class SpecializedArray(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
BOOLS_FIELD_NUMBER: builtins.int
INTS_FIELD_NUMBER: builtins.int
LONGS_FIELD_NUMBER: builtins.int
FLOATS_FIELD_NUMBER: builtins.int
DOUBLES_FIELD_NUMBER: builtins.int
STRINGS_FIELD_NUMBER: builtins.int
@property
def bools(self) -> pyspark.sql.connect.proto.common_pb2.Bools: ...
@property
def ints(self) -> pyspark.sql.connect.proto.common_pb2.Ints: ...
@property
def longs(self) -> pyspark.sql.connect.proto.common_pb2.Longs: ...
@property
def floats(self) -> pyspark.sql.connect.proto.common_pb2.Floats: ...
@property
def doubles(self) -> pyspark.sql.connect.proto.common_pb2.Doubles: ...
@property
def strings(self) -> pyspark.sql.connect.proto.common_pb2.Strings: ...
def __init__(
self,
*,
bools: pyspark.sql.connect.proto.common_pb2.Bools | None = ...,
ints: pyspark.sql.connect.proto.common_pb2.Ints | None = ...,
longs: pyspark.sql.connect.proto.common_pb2.Longs | None = ...,
floats: pyspark.sql.connect.proto.common_pb2.Floats | None = ...,
doubles: pyspark.sql.connect.proto.common_pb2.Doubles | None = ...,
strings: pyspark.sql.connect.proto.common_pb2.Strings | None = ...,
) -> None: ...
def HasField(
self,
field_name: typing_extensions.Literal[
"bools",
b"bools",
"doubles",
b"doubles",
"floats",
b"floats",
"ints",
b"ints",
"longs",
b"longs",
"strings",
b"strings",
"value_type",
b"value_type",
],
) -> builtins.bool: ...
def ClearField(
self,
field_name: typing_extensions.Literal[
"bools",
b"bools",
"doubles",
b"doubles",
"floats",
b"floats",
"ints",
b"ints",
"longs",
b"longs",
"strings",
b"strings",
"value_type",
b"value_type",
],
) -> None: ...
def WhichOneof(
self, oneof_group: typing_extensions.Literal["value_type", b"value_type"]
) -> (
typing_extensions.Literal["bools", "ints", "longs", "floats", "doubles", "strings"]
| None
): ...
NULL_FIELD_NUMBER: builtins.int
BINARY_FIELD_NUMBER: builtins.int
BOOLEAN_FIELD_NUMBER: builtins.int
BYTE_FIELD_NUMBER: builtins.int
SHORT_FIELD_NUMBER: builtins.int
INTEGER_FIELD_NUMBER: builtins.int
LONG_FIELD_NUMBER: builtins.int
FLOAT_FIELD_NUMBER: builtins.int
DOUBLE_FIELD_NUMBER: builtins.int
DECIMAL_FIELD_NUMBER: builtins.int
STRING_FIELD_NUMBER: builtins.int
DATE_FIELD_NUMBER: builtins.int
TIMESTAMP_FIELD_NUMBER: builtins.int
TIMESTAMP_NTZ_FIELD_NUMBER: builtins.int
CALENDAR_INTERVAL_FIELD_NUMBER: builtins.int
YEAR_MONTH_INTERVAL_FIELD_NUMBER: builtins.int
DAY_TIME_INTERVAL_FIELD_NUMBER: builtins.int
ARRAY_FIELD_NUMBER: builtins.int
MAP_FIELD_NUMBER: builtins.int
STRUCT_FIELD_NUMBER: builtins.int
SPECIALIZED_ARRAY_FIELD_NUMBER: builtins.int
@property
def null(self) -> pyspark.sql.connect.proto.types_pb2.DataType: ...
binary: builtins.bytes
boolean: builtins.bool
byte: builtins.int
short: builtins.int
integer: builtins.int
long: builtins.int
float: builtins.float
double: builtins.float
@property
def decimal(self) -> global___Expression.Literal.Decimal: ...
string: builtins.str
date: builtins.int
"""Date in units of days since the UNIX epoch."""
timestamp: builtins.int
"""Timestamp in units of microseconds since the UNIX epoch."""
timestamp_ntz: builtins.int
"""Timestamp in units of microseconds since the UNIX epoch (without timezone information)."""
@property
def calendar_interval(self) -> global___Expression.Literal.CalendarInterval: ...
year_month_interval: builtins.int
day_time_interval: builtins.int
@property
def array(self) -> global___Expression.Literal.Array: ...
@property
def map(self) -> global___Expression.Literal.Map: ...
@property
def struct(self) -> global___Expression.Literal.Struct: ...
@property
def specialized_array(self) -> global___Expression.Literal.SpecializedArray: ...
def __init__(
self,
*,
null: pyspark.sql.connect.proto.types_pb2.DataType | None = ...,
binary: builtins.bytes = ...,
boolean: builtins.bool = ...,
byte: builtins.int = ...,
short: builtins.int = ...,
integer: builtins.int = ...,
long: builtins.int = ...,
float: builtins.float = ...,
double: builtins.float = ...,
decimal: global___Expression.Literal.Decimal | None = ...,
string: builtins.str = ...,
date: builtins.int = ...,
timestamp: builtins.int = ...,
timestamp_ntz: builtins.int = ...,
calendar_interval: global___Expression.Literal.CalendarInterval | None = ...,
year_month_interval: builtins.int = ...,
day_time_interval: builtins.int = ...,
array: global___Expression.Literal.Array | None = ...,
map: global___Expression.Literal.Map | None = ...,
struct: global___Expression.Literal.Struct | None = ...,
specialized_array: global___Expression.Literal.SpecializedArray | None = ...,
) -> None: ...
def HasField(
self,
field_name: typing_extensions.Literal[
"array",
b"array",
"binary",
b"binary",
"boolean",
b"boolean",
"byte",
b"byte",
"calendar_interval",
b"calendar_interval",
"date",
b"date",
"day_time_interval",
b"day_time_interval",
"decimal",
b"decimal",
"double",
b"double",
"float",
b"float",
"integer",
b"integer",
"literal_type",
b"literal_type",
"long",
b"long",
"map",
b"map",
"null",
b"null",
"short",
b"short",
"specialized_array",
b"specialized_array",
"string",
b"string",
"struct",
b"struct",
"timestamp",
b"timestamp",
"timestamp_ntz",
b"timestamp_ntz",
"year_month_interval",
b"year_month_interval",
],
) -> builtins.bool: ...
def ClearField(
self,
field_name: typing_extensions.Literal[
"array",
b"array",
"binary",
b"binary",
"boolean",
b"boolean",
"byte",
b"byte",
"calendar_interval",
b"calendar_interval",
"date",
b"date",
"day_time_interval",
b"day_time_interval",
"decimal",
b"decimal",
"double",
b"double",
"float",
b"float",
"integer",
b"integer",
"literal_type",
b"literal_type",
"long",
b"long",
"map",
b"map",
"null",
b"null",
"short",
b"short",
"specialized_array",
b"specialized_array",
"string",
b"string",
"struct",
b"struct",
"timestamp",
b"timestamp",
"timestamp_ntz",
b"timestamp_ntz",
"year_month_interval",
b"year_month_interval",
],
) -> None: ...
def WhichOneof(
self, oneof_group: typing_extensions.Literal["literal_type", b"literal_type"]
) -> (
typing_extensions.Literal[
"null",
"binary",
"boolean",
"byte",
"short",
"integer",
"long",
"float",
"double",
"decimal",
"string",
"date",
"timestamp",
"timestamp_ntz",
"calendar_interval",
"year_month_interval",
"day_time_interval",
"array",
"map",
"struct",
"specialized_array",
]
| None
): ...
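# A minimal sketch (comments only): every Literal carries exactly one member
# of the 'literal_type' oneof, and WhichOneof reports which one is populated:
#
#   from pyspark.sql.connect.proto import expressions_pb2 as pb
#
#   lit = pb.Expression.Literal(long=42)
#   assert lit.WhichOneof("literal_type") == "long"
#   lit.string = "hello"  # assigning a different member switches the oneof
#   assert lit.WhichOneof("literal_type") == "string"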
class UnresolvedAttribute(google.protobuf.message.Message):
"""An unresolved attribute that is not explicitly bound to a specific column, but the column
is resolved during analysis by name.
"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
UNPARSED_IDENTIFIER_FIELD_NUMBER: builtins.int
PLAN_ID_FIELD_NUMBER: builtins.int
IS_METADATA_COLUMN_FIELD_NUMBER: builtins.int
unparsed_identifier: builtins.str
"""(Required) An identifier that will be parsed by Catalyst parser. This should follow the
Spark SQL identifier syntax.
"""
plan_id: builtins.int
"""(Optional) The id of corresponding connect plan."""
is_metadata_column: builtins.bool
"""(Optional) The requested column is a metadata column."""
def __init__(
self,
*,
unparsed_identifier: builtins.str = ...,
plan_id: builtins.int | None = ...,
is_metadata_column: builtins.bool | None = ...,
) -> None: ...
def HasField(
self,
field_name: typing_extensions.Literal[
"_is_metadata_column",
b"_is_metadata_column",
"_plan_id",
b"_plan_id",
"is_metadata_column",
b"is_metadata_column",
"plan_id",
b"plan_id",
],
) -> builtins.bool: ...
def ClearField(
self,
field_name: typing_extensions.Literal[
"_is_metadata_column",
b"_is_metadata_column",
"_plan_id",
b"_plan_id",
"is_metadata_column",
b"is_metadata_column",
"plan_id",
b"plan_id",
"unparsed_identifier",
b"unparsed_identifier",
],
) -> None: ...
@typing.overload
def WhichOneof(
self,
oneof_group: typing_extensions.Literal["_is_metadata_column", b"_is_metadata_column"],
) -> typing_extensions.Literal["is_metadata_column"] | None: ...
@typing.overload
def WhichOneof(
self, oneof_group: typing_extensions.Literal["_plan_id", b"_plan_id"]
) -> typing_extensions.Literal["plan_id"] | None: ...
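# A minimal sketch (comments only): 'plan_id' and 'is_metadata_column' are
# proto3 optional fields, which is why the synthetic '_plan_id' and
# '_is_metadata_column' oneof groups appear above:
#
#   from pyspark.sql.connect.proto import expressions_pb2 as pb
#
#   attr = pb.Expression.UnresolvedAttribute(unparsed_identifier="a.b")
#   assert not attr.HasField("plan_id")  # unset, not merely zero
#   attr.plan_id = 0
#   assert attr.HasField("plan_id")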
class UnresolvedFunction(google.protobuf.message.Message):
"""An unresolved function is not explicitly bound to one explicit function, but the function
is resolved during analysis following Sparks name resolution rules.
"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
FUNCTION_NAME_FIELD_NUMBER: builtins.int
ARGUMENTS_FIELD_NUMBER: builtins.int
IS_DISTINCT_FIELD_NUMBER: builtins.int
IS_USER_DEFINED_FUNCTION_FIELD_NUMBER: builtins.int
IS_INTERNAL_FIELD_NUMBER: builtins.int
function_name: builtins.str
"""(Required) name (or unparsed name for user defined function) for the unresolved function."""
@property
def arguments(
self,
) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[
global___Expression
]:
"""(Optional) Function arguments. Empty arguments are allowed."""
is_distinct: builtins.bool
"""(Required) Indicate if this function should be applied on distinct values."""
is_user_defined_function: builtins.bool
"""(Required) Indicate if this is a user defined function.
When it is not a user defined function, Connect will use the function name directly.
When it is a user defined function, Connect will parse the function name first.
"""
is_internal: builtins.bool
"""(Optional) Indicate if this function is defined in the internal function registry.
If not set, the server will try to look up the function in the internal function registry
and decide appropriately.
"""
def __init__(
self,
*,
function_name: builtins.str = ...,
arguments: collections.abc.Iterable[global___Expression] | None = ...,
is_distinct: builtins.bool = ...,
is_user_defined_function: builtins.bool = ...,
is_internal: builtins.bool | None = ...,
) -> None: ...
def HasField(
self,
field_name: typing_extensions.Literal[
"_is_internal", b"_is_internal", "is_internal", b"is_internal"
],
) -> builtins.bool: ...
def ClearField(
self,
field_name: typing_extensions.Literal[
"_is_internal",
b"_is_internal",
"arguments",
b"arguments",
"function_name",
b"function_name",
"is_distinct",
b"is_distinct",
"is_internal",
b"is_internal",
"is_user_defined_function",
b"is_user_defined_function",
],
) -> None: ...
def WhichOneof(
self, oneof_group: typing_extensions.Literal["_is_internal", b"_is_internal"]
) -> typing_extensions.Literal["is_internal"] | None: ...
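# A minimal sketch (comments only): an unresolved call equivalent to
# sum(DISTINCT x); the function and column names are illustrative:
#
#   from pyspark.sql.connect.proto import expressions_pb2 as pb
#
#   fn = pb.Expression.UnresolvedFunction(
#       function_name="sum",
#       arguments=[
#           pb.Expression(
#               unresolved_attribute=pb.Expression.UnresolvedAttribute(
#                   unparsed_identifier="x"
#               )
#           )
#       ],
#       is_distinct=True,
#   )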
class ExpressionString(google.protobuf.message.Message):
"""Expression as string."""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
EXPRESSION_FIELD_NUMBER: builtins.int
expression: builtins.str
"""(Required) A SQL expression that will be parsed by Catalyst parser."""
def __init__(
self,
*,
expression: builtins.str = ...,
) -> None: ...
def ClearField(
self, field_name: typing_extensions.Literal["expression", b"expression"]
) -> None: ...
class UnresolvedStar(google.protobuf.message.Message):
"""UnresolvedStar is used to expand all the fields of a relation or struct."""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
UNPARSED_TARGET_FIELD_NUMBER: builtins.int
PLAN_ID_FIELD_NUMBER: builtins.int
unparsed_target: builtins.str
"""(Optional) The target of the expansion.
If set, it should end with '.*' and will be parsed by 'parseAttributeName'
on the server side.
"""
plan_id: builtins.int
"""(Optional) The id of corresponding connect plan."""
def __init__(
self,
*,
unparsed_target: builtins.str | None = ...,
plan_id: builtins.int | None = ...,
) -> None: ...
def HasField(
self,
field_name: typing_extensions.Literal[
"_plan_id",
b"_plan_id",
"_unparsed_target",
b"_unparsed_target",
"plan_id",
b"plan_id",
"unparsed_target",
b"unparsed_target",
],
) -> builtins.bool: ...
def ClearField(
self,
field_name: typing_extensions.Literal[
"_plan_id",
b"_plan_id",
"_unparsed_target",
b"_unparsed_target",
"plan_id",
b"plan_id",
"unparsed_target",
b"unparsed_target",
],
) -> None: ...
@typing.overload
def WhichOneof(
self, oneof_group: typing_extensions.Literal["_plan_id", b"_plan_id"]
) -> typing_extensions.Literal["plan_id"] | None: ...
@typing.overload
def WhichOneof(
self, oneof_group: typing_extensions.Literal["_unparsed_target", b"_unparsed_target"]
) -> typing_extensions.Literal["unparsed_target"] | None: ...
class UnresolvedRegex(google.protobuf.message.Message):
"""Represents all of the input attributes to a given relational operator, for example in
"SELECT `(id)?+.+` FROM ...".
"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
COL_NAME_FIELD_NUMBER: builtins.int
PLAN_ID_FIELD_NUMBER: builtins.int
col_name: builtins.str
"""(Required) The column name used to extract column with regex."""
plan_id: builtins.int
"""(Optional) The id of corresponding connect plan."""
def __init__(
self,
*,
col_name: builtins.str = ...,
plan_id: builtins.int | None = ...,
) -> None: ...
def HasField(
self,
field_name: typing_extensions.Literal["_plan_id", b"_plan_id", "plan_id", b"plan_id"],
) -> builtins.bool: ...
def ClearField(
self,
field_name: typing_extensions.Literal[
"_plan_id", b"_plan_id", "col_name", b"col_name", "plan_id", b"plan_id"
],
) -> None: ...
def WhichOneof(
self, oneof_group: typing_extensions.Literal["_plan_id", b"_plan_id"]
) -> typing_extensions.Literal["plan_id"] | None: ...
class UnresolvedExtractValue(google.protobuf.message.Message):
"""Extracts a value or values from an Expression"""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
CHILD_FIELD_NUMBER: builtins.int
EXTRACTION_FIELD_NUMBER: builtins.int
@property
def child(self) -> global___Expression:
"""(Required) The expression to extract value from, can be
Map, Array, Struct or array of Structs.
"""
@property
def extraction(self) -> global___Expression:
"""(Required) The expression to describe the extraction, can be
key of Map, index of Array, field name of Struct.
"""
def __init__(
self,
*,
child: global___Expression | None = ...,
extraction: global___Expression | None = ...,
) -> None: ...
def HasField(
self,
field_name: typing_extensions.Literal["child", b"child", "extraction", b"extraction"],
) -> builtins.bool: ...
def ClearField(
self,
field_name: typing_extensions.Literal["child", b"child", "extraction", b"extraction"],
) -> None: ...
class UpdateFields(google.protobuf.message.Message):
"""Add, replace or drop a field of `StructType` expression by name."""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
STRUCT_EXPRESSION_FIELD_NUMBER: builtins.int
FIELD_NAME_FIELD_NUMBER: builtins.int
VALUE_EXPRESSION_FIELD_NUMBER: builtins.int
@property
def struct_expression(self) -> global___Expression:
"""(Required) The struct expression."""
field_name: builtins.str
"""(Required) The field name."""
@property
def value_expression(self) -> global___Expression:
"""(Optional) The expression to add or replace.
When not set, it means this field will be dropped.
"""
def __init__(
self,
*,
struct_expression: global___Expression | None = ...,
field_name: builtins.str = ...,
value_expression: global___Expression | None = ...,
) -> None: ...
def HasField(
self,
field_name: typing_extensions.Literal[
"struct_expression", b"struct_expression", "value_expression", b"value_expression"
],
) -> builtins.bool: ...
def ClearField(
self,
field_name: typing_extensions.Literal[
"field_name",
b"field_name",
"struct_expression",
b"struct_expression",
"value_expression",
b"value_expression",
],
) -> None: ...
class Alias(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
EXPR_FIELD_NUMBER: builtins.int
NAME_FIELD_NUMBER: builtins.int
METADATA_FIELD_NUMBER: builtins.int
@property
def expr(self) -> global___Expression:
"""(Required) The expression that alias will be added on."""
@property
def name(
self,
) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]:
"""(Required) a list of name parts for the alias.
Scalar columns only has one name that presents.
"""
metadata: builtins.str
"""(Optional) Alias metadata expressed as a JSON map."""
def __init__(
self,
*,
expr: global___Expression | None = ...,
name: collections.abc.Iterable[builtins.str] | None = ...,
metadata: builtins.str | None = ...,
) -> None: ...
def HasField(
self,
field_name: typing_extensions.Literal[
"_metadata", b"_metadata", "expr", b"expr", "metadata", b"metadata"
],
) -> builtins.bool: ...
def ClearField(
self,
field_name: typing_extensions.Literal[
"_metadata", b"_metadata", "expr", b"expr", "metadata", b"metadata", "name", b"name"
],
) -> None: ...
def WhichOneof(
self, oneof_group: typing_extensions.Literal["_metadata", b"_metadata"]
) -> typing_extensions.Literal["metadata"] | None: ...
class LambdaFunction(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
FUNCTION_FIELD_NUMBER: builtins.int
ARGUMENTS_FIELD_NUMBER: builtins.int
@property
def function(self) -> global___Expression:
"""(Required) The lambda function.
The function body should use 'UnresolvedAttribute' as arguments, the server side will
replace 'UnresolvedAttribute' with 'UnresolvedNamedLambdaVariable'.
"""
@property
def arguments(
self,
) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[
global___Expression.UnresolvedNamedLambdaVariable
]:
"""(Required) Function variables. Must contains 1 ~ 3 variables."""
def __init__(
self,
*,
function: global___Expression | None = ...,
arguments: collections.abc.Iterable[global___Expression.UnresolvedNamedLambdaVariable]
| None = ...,
) -> None: ...
def HasField(
self, field_name: typing_extensions.Literal["function", b"function"]
) -> builtins.bool: ...
def ClearField(
self,
field_name: typing_extensions.Literal[
"arguments", b"arguments", "function", b"function"
],
) -> None: ...
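# A minimal sketch (comments only): a lambda like x -> x + 1, whose body
# refers to the variable through an UnresolvedAttribute as described above:
#
#   from pyspark.sql.connect.proto import expressions_pb2 as pb
#
#   x = pb.Expression(
#       unresolved_attribute=pb.Expression.UnresolvedAttribute(
#           unparsed_identifier="x"
#       )
#   )
#   one = pb.Expression(literal=pb.Expression.Literal(integer=1))
#   body = pb.Expression(
#       unresolved_function=pb.Expression.UnresolvedFunction(
#           function_name="+", arguments=[x, one]
#       )
#   )
#   fn = pb.Expression.LambdaFunction(
#       function=body,
#       arguments=[pb.Expression.UnresolvedNamedLambdaVariable(name_parts=["x"])],
#   )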
class UnresolvedNamedLambdaVariable(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
NAME_PARTS_FIELD_NUMBER: builtins.int
@property
def name_parts(
self,
) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]:
"""(Required) a list of name parts for the variable. Must not be empty."""
def __init__(
self,
*,
name_parts: collections.abc.Iterable[builtins.str] | None = ...,
) -> None: ...
def ClearField(
self, field_name: typing_extensions.Literal["name_parts", b"name_parts"]
) -> None: ...
COMMON_FIELD_NUMBER: builtins.int
LITERAL_FIELD_NUMBER: builtins.int
UNRESOLVED_ATTRIBUTE_FIELD_NUMBER: builtins.int
UNRESOLVED_FUNCTION_FIELD_NUMBER: builtins.int
EXPRESSION_STRING_FIELD_NUMBER: builtins.int
UNRESOLVED_STAR_FIELD_NUMBER: builtins.int
ALIAS_FIELD_NUMBER: builtins.int
CAST_FIELD_NUMBER: builtins.int
UNRESOLVED_REGEX_FIELD_NUMBER: builtins.int
SORT_ORDER_FIELD_NUMBER: builtins.int
LAMBDA_FUNCTION_FIELD_NUMBER: builtins.int
WINDOW_FIELD_NUMBER: builtins.int
UNRESOLVED_EXTRACT_VALUE_FIELD_NUMBER: builtins.int
UPDATE_FIELDS_FIELD_NUMBER: builtins.int
UNRESOLVED_NAMED_LAMBDA_VARIABLE_FIELD_NUMBER: builtins.int
COMMON_INLINE_USER_DEFINED_FUNCTION_FIELD_NUMBER: builtins.int
CALL_FUNCTION_FIELD_NUMBER: builtins.int
NAMED_ARGUMENT_EXPRESSION_FIELD_NUMBER: builtins.int
MERGE_ACTION_FIELD_NUMBER: builtins.int
TYPED_AGGREGATE_EXPRESSION_FIELD_NUMBER: builtins.int
SUBQUERY_EXPRESSION_FIELD_NUMBER: builtins.int
EXTENSION_FIELD_NUMBER: builtins.int
@property
def common(self) -> global___ExpressionCommon: ...
@property
def literal(self) -> global___Expression.Literal: ...
@property
def unresolved_attribute(self) -> global___Expression.UnresolvedAttribute: ...
@property
def unresolved_function(self) -> global___Expression.UnresolvedFunction: ...
@property
def expression_string(self) -> global___Expression.ExpressionString: ...
@property
def unresolved_star(self) -> global___Expression.UnresolvedStar: ...
@property
def alias(self) -> global___Expression.Alias: ...
@property
def cast(self) -> global___Expression.Cast: ...
@property
def unresolved_regex(self) -> global___Expression.UnresolvedRegex: ...
@property
def sort_order(self) -> global___Expression.SortOrder: ...
@property
def lambda_function(self) -> global___Expression.LambdaFunction: ...
@property
def window(self) -> global___Expression.Window: ...
@property
def unresolved_extract_value(self) -> global___Expression.UnresolvedExtractValue: ...
@property
def update_fields(self) -> global___Expression.UpdateFields: ...
@property
def unresolved_named_lambda_variable(
self,
) -> global___Expression.UnresolvedNamedLambdaVariable: ...
@property
def common_inline_user_defined_function(self) -> global___CommonInlineUserDefinedFunction: ...
@property
def call_function(self) -> global___CallFunction: ...
@property
def named_argument_expression(self) -> global___NamedArgumentExpression: ...
@property
def merge_action(self) -> global___MergeAction: ...
@property
def typed_aggregate_expression(self) -> global___TypedAggregateExpression: ...
@property
def subquery_expression(self) -> global___SubqueryExpression: ...
@property
def extension(self) -> google.protobuf.any_pb2.Any:
"""This field is used to mark extensions to the protocol. When plugins generate arbitrary
relations they can add them here. During the planning the correct resolution is done.
"""
def __init__(
self,
*,
common: global___ExpressionCommon | None = ...,
literal: global___Expression.Literal | None = ...,
unresolved_attribute: global___Expression.UnresolvedAttribute | None = ...,
unresolved_function: global___Expression.UnresolvedFunction | None = ...,
expression_string: global___Expression.ExpressionString | None = ...,
unresolved_star: global___Expression.UnresolvedStar | None = ...,
alias: global___Expression.Alias | None = ...,
cast: global___Expression.Cast | None = ...,
unresolved_regex: global___Expression.UnresolvedRegex | None = ...,
sort_order: global___Expression.SortOrder | None = ...,
lambda_function: global___Expression.LambdaFunction | None = ...,
window: global___Expression.Window | None = ...,
unresolved_extract_value: global___Expression.UnresolvedExtractValue | None = ...,
update_fields: global___Expression.UpdateFields | None = ...,
unresolved_named_lambda_variable: global___Expression.UnresolvedNamedLambdaVariable
| None = ...,
common_inline_user_defined_function: global___CommonInlineUserDefinedFunction | None = ...,
call_function: global___CallFunction | None = ...,
named_argument_expression: global___NamedArgumentExpression | None = ...,
merge_action: global___MergeAction | None = ...,
typed_aggregate_expression: global___TypedAggregateExpression | None = ...,
subquery_expression: global___SubqueryExpression | None = ...,
extension: google.protobuf.any_pb2.Any | None = ...,
) -> None: ...
def HasField(
self,
field_name: typing_extensions.Literal[
"alias",
b"alias",
"call_function",
b"call_function",
"cast",
b"cast",
"common",
b"common",
"common_inline_user_defined_function",
b"common_inline_user_defined_function",
"expr_type",
b"expr_type",
"expression_string",
b"expression_string",
"extension",
b"extension",
"lambda_function",
b"lambda_function",
"literal",
b"literal",
"merge_action",
b"merge_action",
"named_argument_expression",
b"named_argument_expression",
"sort_order",
b"sort_order",
"subquery_expression",
b"subquery_expression",
"typed_aggregate_expression",
b"typed_aggregate_expression",
"unresolved_attribute",
b"unresolved_attribute",
"unresolved_extract_value",
b"unresolved_extract_value",
"unresolved_function",
b"unresolved_function",
"unresolved_named_lambda_variable",
b"unresolved_named_lambda_variable",
"unresolved_regex",
b"unresolved_regex",
"unresolved_star",
b"unresolved_star",
"update_fields",
b"update_fields",
"window",
b"window",
],
) -> builtins.bool: ...
def ClearField(
self,
field_name: typing_extensions.Literal[
"alias",
b"alias",
"call_function",
b"call_function",
"cast",
b"cast",
"common",
b"common",
"common_inline_user_defined_function",
b"common_inline_user_defined_function",
"expr_type",
b"expr_type",
"expression_string",
b"expression_string",
"extension",
b"extension",
"lambda_function",
b"lambda_function",
"literal",
b"literal",
"merge_action",
b"merge_action",
"named_argument_expression",
b"named_argument_expression",
"sort_order",
b"sort_order",
"subquery_expression",
b"subquery_expression",
"typed_aggregate_expression",
b"typed_aggregate_expression",
"unresolved_attribute",
b"unresolved_attribute",
"unresolved_extract_value",
b"unresolved_extract_value",
"unresolved_function",
b"unresolved_function",
"unresolved_named_lambda_variable",
b"unresolved_named_lambda_variable",
"unresolved_regex",
b"unresolved_regex",
"unresolved_star",
b"unresolved_star",
"update_fields",
b"update_fields",
"window",
b"window",
],
) -> None: ...
def WhichOneof(
self, oneof_group: typing_extensions.Literal["expr_type", b"expr_type"]
) -> (
typing_extensions.Literal[
"literal",
"unresolved_attribute",
"unresolved_function",
"expression_string",
"unresolved_star",
"alias",
"cast",
"unresolved_regex",
"sort_order",
"lambda_function",
"window",
"unresolved_extract_value",
"update_fields",
"unresolved_named_lambda_variable",
"common_inline_user_defined_function",
"call_function",
"named_argument_expression",
"merge_action",
"typed_aggregate_expression",
"subquery_expression",
"extension",
]
| None
): ...
global___Expression = Expression
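# A minimal sketch (comments only): Expression is a tagged union over the
# 'expr_type' oneof, and client-built columns nest smaller messages inside
# it; the names below are illustrative:
#
#   from pyspark.sql.connect.proto import expressions_pb2 as pb
#
#   col = pb.Expression(
#       unresolved_attribute=pb.Expression.UnresolvedAttribute(
#           unparsed_identifier="id"
#       )
#   )
#   aliased = pb.Expression(alias=pb.Expression.Alias(expr=col, name=["renamed"]))
#   assert aliased.WhichOneof("expr_type") == "alias"
#   payload = aliased.SerializeToString()  # bytes carried in a Connect plan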
class ExpressionCommon(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
ORIGIN_FIELD_NUMBER: builtins.int
@property
def origin(self) -> pyspark.sql.connect.proto.common_pb2.Origin:
"""(Required) Keep the information of the origin for this expression such as stacktrace."""
def __init__(
self,
*,
origin: pyspark.sql.connect.proto.common_pb2.Origin | None = ...,
) -> None: ...
def HasField(
self, field_name: typing_extensions.Literal["origin", b"origin"]
) -> builtins.bool: ...
def ClearField(self, field_name: typing_extensions.Literal["origin", b"origin"]) -> None: ...
global___ExpressionCommon = ExpressionCommon
class CommonInlineUserDefinedFunction(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
FUNCTION_NAME_FIELD_NUMBER: builtins.int
DETERMINISTIC_FIELD_NUMBER: builtins.int
ARGUMENTS_FIELD_NUMBER: builtins.int
PYTHON_UDF_FIELD_NUMBER: builtins.int
SCALAR_SCALA_UDF_FIELD_NUMBER: builtins.int
JAVA_UDF_FIELD_NUMBER: builtins.int
IS_DISTINCT_FIELD_NUMBER: builtins.int
function_name: builtins.str
"""(Required) Name of the user-defined function."""
deterministic: builtins.bool
"""(Optional) Indicate if the user-defined function is deterministic."""
@property
def arguments(
self,
) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Expression]:
"""(Optional) Function arguments. Empty arguments are allowed."""
@property
def python_udf(self) -> global___PythonUDF: ...
@property
def scalar_scala_udf(self) -> global___ScalarScalaUDF: ...
@property
def java_udf(self) -> global___JavaUDF: ...
is_distinct: builtins.bool
"""(Required) Indicate if this function should be applied on distinct values."""
def __init__(
self,
*,
function_name: builtins.str = ...,
deterministic: builtins.bool = ...,
arguments: collections.abc.Iterable[global___Expression] | None = ...,
python_udf: global___PythonUDF | None = ...,
scalar_scala_udf: global___ScalarScalaUDF | None = ...,
java_udf: global___JavaUDF | None = ...,
is_distinct: builtins.bool = ...,
) -> None: ...
def HasField(
self,
field_name: typing_extensions.Literal[
"function",
b"function",
"java_udf",
b"java_udf",
"python_udf",
b"python_udf",
"scalar_scala_udf",
b"scalar_scala_udf",
],
) -> builtins.bool: ...
def ClearField(
self,
field_name: typing_extensions.Literal[
"arguments",
b"arguments",
"deterministic",
b"deterministic",
"function",
b"function",
"function_name",
b"function_name",
"is_distinct",
b"is_distinct",
"java_udf",
b"java_udf",
"python_udf",
b"python_udf",
"scalar_scala_udf",
b"scalar_scala_udf",
],
) -> None: ...
def WhichOneof(
self, oneof_group: typing_extensions.Literal["function", b"function"]
) -> typing_extensions.Literal["python_udf", "scalar_scala_udf", "java_udf"] | None: ...
global___CommonInlineUserDefinedFunction = CommonInlineUserDefinedFunction
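# A minimal sketch (comments only): exactly one of python_udf,
# scalar_scala_udf, or java_udf is carried in the 'function' oneof; the
# function name and payload below are illustrative:
#
#   from pyspark.sql.connect.proto import expressions_pb2 as pb
#
#   udf = pb.CommonInlineUserDefinedFunction(
#       function_name="my_udf",
#       deterministic=True,
#       python_udf=pb.PythonUDF(
#           eval_type=100,  # assumed to be PySpark's SQL_BATCHED_UDF
#           command=b"...",  # pickled function bytes in practice
#           python_ver="3.11",
#       ),
#   )
#   assert udf.WhichOneof("function") == "python_udf"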
class PythonUDF(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
OUTPUT_TYPE_FIELD_NUMBER: builtins.int
EVAL_TYPE_FIELD_NUMBER: builtins.int
COMMAND_FIELD_NUMBER: builtins.int
PYTHON_VER_FIELD_NUMBER: builtins.int
ADDITIONAL_INCLUDES_FIELD_NUMBER: builtins.int
@property
def output_type(self) -> pyspark.sql.connect.proto.types_pb2.DataType:
"""(Required) Output type of the Python UDF"""
eval_type: builtins.int
"""(Required) EvalType of the Python UDF"""
command: builtins.bytes
"""(Required) The encoded commands of the Python UDF"""
python_ver: builtins.str
"""(Required) Python version being used in the client."""
@property
def additional_includes(
self,
) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]:
"""(Optional) Additional includes for the Python UDF."""
def __init__(
self,
*,
output_type: pyspark.sql.connect.proto.types_pb2.DataType | None = ...,
eval_type: builtins.int = ...,
command: builtins.bytes = ...,
python_ver: builtins.str = ...,
additional_includes: collections.abc.Iterable[builtins.str] | None = ...,
) -> None: ...
def HasField(
self, field_name: typing_extensions.Literal["output_type", b"output_type"]
) -> builtins.bool: ...
def ClearField(
self,
field_name: typing_extensions.Literal[
"additional_includes",
b"additional_includes",
"command",
b"command",
"eval_type",
b"eval_type",
"output_type",
b"output_type",
"python_ver",
b"python_ver",
],
) -> None: ...
global___PythonUDF = PythonUDF
class ScalarScalaUDF(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
PAYLOAD_FIELD_NUMBER: builtins.int
INPUTTYPES_FIELD_NUMBER: builtins.int
OUTPUTTYPE_FIELD_NUMBER: builtins.int
NULLABLE_FIELD_NUMBER: builtins.int
AGGREGATE_FIELD_NUMBER: builtins.int
payload: builtins.bytes
"""(Required) Serialized JVM object containing UDF definition, input encoders and output encoder"""
@property
def inputTypes(
self,
) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[
pyspark.sql.connect.proto.types_pb2.DataType
]:
"""(Optional) Input type(s) of the UDF"""
@property
def outputType(self) -> pyspark.sql.connect.proto.types_pb2.DataType:
"""(Required) Output type of the UDF"""
nullable: builtins.bool
"""(Required) True if the UDF can return null value"""
aggregate: builtins.bool
"""(Required) Indicate if the UDF is an aggregate function"""
def __init__(
self,
*,
payload: builtins.bytes = ...,
inputTypes: collections.abc.Iterable[pyspark.sql.connect.proto.types_pb2.DataType]
| None = ...,
outputType: pyspark.sql.connect.proto.types_pb2.DataType | None = ...,
nullable: builtins.bool = ...,
aggregate: builtins.bool = ...,
) -> None: ...
def HasField(
self, field_name: typing_extensions.Literal["outputType", b"outputType"]
) -> builtins.bool: ...
def ClearField(
self,
field_name: typing_extensions.Literal[
"aggregate",
b"aggregate",
"inputTypes",
b"inputTypes",
"nullable",
b"nullable",
"outputType",
b"outputType",
"payload",
b"payload",
],
) -> None: ...
global___ScalarScalaUDF = ScalarScalaUDF
class JavaUDF(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
CLASS_NAME_FIELD_NUMBER: builtins.int
OUTPUT_TYPE_FIELD_NUMBER: builtins.int
AGGREGATE_FIELD_NUMBER: builtins.int
class_name: builtins.str
"""(Required) Fully qualified name of Java class"""
@property
def output_type(self) -> pyspark.sql.connect.proto.types_pb2.DataType:
"""(Optional) Output type of the Java UDF"""
aggregate: builtins.bool
"""(Required) Indicate if the Java user-defined function is an aggregate function"""
def __init__(
self,
*,
class_name: builtins.str = ...,
output_type: pyspark.sql.connect.proto.types_pb2.DataType | None = ...,
aggregate: builtins.bool = ...,
) -> None: ...
def HasField(
self,
field_name: typing_extensions.Literal[
"_output_type", b"_output_type", "output_type", b"output_type"
],
) -> builtins.bool: ...
def ClearField(
self,
field_name: typing_extensions.Literal[
"_output_type",
b"_output_type",
"aggregate",
b"aggregate",
"class_name",
b"class_name",
"output_type",
b"output_type",
],
) -> None: ...
def WhichOneof(
self, oneof_group: typing_extensions.Literal["_output_type", b"_output_type"]
) -> typing_extensions.Literal["output_type"] | None: ...
global___JavaUDF = JavaUDF
class TypedAggregateExpression(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
SCALAR_SCALA_UDF_FIELD_NUMBER: builtins.int
@property
def scalar_scala_udf(self) -> global___ScalarScalaUDF:
"""(Required) The aggregate function object packed into bytes."""
def __init__(
self,
*,
scalar_scala_udf: global___ScalarScalaUDF | None = ...,
) -> None: ...
def HasField(
self, field_name: typing_extensions.Literal["scalar_scala_udf", b"scalar_scala_udf"]
) -> builtins.bool: ...
def ClearField(
self, field_name: typing_extensions.Literal["scalar_scala_udf", b"scalar_scala_udf"]
) -> None: ...
global___TypedAggregateExpression = TypedAggregateExpression
class CallFunction(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
FUNCTION_NAME_FIELD_NUMBER: builtins.int
ARGUMENTS_FIELD_NUMBER: builtins.int
function_name: builtins.str
"""(Required) Unparsed name of the SQL function."""
@property
def arguments(
self,
) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Expression]:
"""(Optional) Function arguments. Empty arguments are allowed."""
def __init__(
self,
*,
function_name: builtins.str = ...,
arguments: collections.abc.Iterable[global___Expression] | None = ...,
) -> None: ...
def ClearField(
self,
field_name: typing_extensions.Literal[
"arguments", b"arguments", "function_name", b"function_name"
],
) -> None: ...
global___CallFunction = CallFunction
class NamedArgumentExpression(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
KEY_FIELD_NUMBER: builtins.int
VALUE_FIELD_NUMBER: builtins.int
key: builtins.str
"""(Required) The key of the named argument."""
@property
def value(self) -> global___Expression:
"""(Required) The value expression of the named argument."""
def __init__(
self,
*,
key: builtins.str = ...,
value: global___Expression | None = ...,
) -> None: ...
def HasField(
self, field_name: typing_extensions.Literal["value", b"value"]
) -> builtins.bool: ...
def ClearField(
self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]
) -> None: ...
global___NamedArgumentExpression = NamedArgumentExpression
class MergeAction(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
class _ActionType:
ValueType = typing.NewType("ValueType", builtins.int)
V: typing_extensions.TypeAlias = ValueType
class _ActionTypeEnumTypeWrapper(
google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[
MergeAction._ActionType.ValueType
],
builtins.type,
): # noqa: F821
DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor
ACTION_TYPE_INVALID: MergeAction._ActionType.ValueType # 0
ACTION_TYPE_DELETE: MergeAction._ActionType.ValueType # 1
ACTION_TYPE_INSERT: MergeAction._ActionType.ValueType # 2
ACTION_TYPE_INSERT_STAR: MergeAction._ActionType.ValueType # 3
ACTION_TYPE_UPDATE: MergeAction._ActionType.ValueType # 4
ACTION_TYPE_UPDATE_STAR: MergeAction._ActionType.ValueType # 5
class ActionType(_ActionType, metaclass=_ActionTypeEnumTypeWrapper): ...
ACTION_TYPE_INVALID: MergeAction.ActionType.ValueType # 0
ACTION_TYPE_DELETE: MergeAction.ActionType.ValueType # 1
ACTION_TYPE_INSERT: MergeAction.ActionType.ValueType # 2
ACTION_TYPE_INSERT_STAR: MergeAction.ActionType.ValueType # 3
ACTION_TYPE_UPDATE: MergeAction.ActionType.ValueType # 4
ACTION_TYPE_UPDATE_STAR: MergeAction.ActionType.ValueType # 5
class Assignment(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
KEY_FIELD_NUMBER: builtins.int
VALUE_FIELD_NUMBER: builtins.int
@property
def key(self) -> global___Expression:
"""(Required) The key of the assignment."""
@property
def value(self) -> global___Expression:
"""(Required) The value of the assignment."""
def __init__(
self,
*,
key: global___Expression | None = ...,
value: global___Expression | None = ...,
) -> None: ...
def HasField(
self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]
) -> builtins.bool: ...
def ClearField(
self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]
) -> None: ...
ACTION_TYPE_FIELD_NUMBER: builtins.int
CONDITION_FIELD_NUMBER: builtins.int
ASSIGNMENTS_FIELD_NUMBER: builtins.int
action_type: global___MergeAction.ActionType.ValueType
"""(Required) The action type of the merge action."""
@property
def condition(self) -> global___Expression:
"""(Optional) The condition expression of the merge action."""
@property
def assignments(
self,
) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[
global___MergeAction.Assignment
]:
"""(Optional) The assignments of the merge action. Required for ActionTypes INSERT and UPDATE."""
def __init__(
self,
*,
action_type: global___MergeAction.ActionType.ValueType = ...,
condition: global___Expression | None = ...,
assignments: collections.abc.Iterable[global___MergeAction.Assignment] | None = ...,
) -> None: ...
def HasField(
self,
field_name: typing_extensions.Literal[
"_condition", b"_condition", "condition", b"condition"
],
) -> builtins.bool: ...
def ClearField(
self,
field_name: typing_extensions.Literal[
"_condition",
b"_condition",
"action_type",
b"action_type",
"assignments",
b"assignments",
"condition",
b"condition",
],
) -> None: ...
def WhichOneof(
self, oneof_group: typing_extensions.Literal["_condition", b"_condition"]
) -> typing_extensions.Literal["condition"] | None: ...
global___MergeAction = MergeAction
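# A minimal sketch (comments only): a conditional UPDATE merge action with a
# single assignment; _col is a tiny illustrative helper, not part of this API:
#
#   from pyspark.sql.connect.proto import expressions_pb2 as pb
#
#   def _col(name: str) -> pb.Expression:
#       return pb.Expression(
#           unresolved_attribute=pb.Expression.UnresolvedAttribute(
#               unparsed_identifier=name
#           )
#       )
#
#   action = pb.MergeAction(
#       action_type=pb.MergeAction.ACTION_TYPE_UPDATE,
#       condition=_col("source.changed"),
#       assignments=[
#           pb.MergeAction.Assignment(key=_col("target.v"), value=_col("source.v"))
#       ],
#   )
#   assert action.HasField("condition")  # the optional condition is set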
class SubqueryExpression(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
class _SubqueryType:
ValueType = typing.NewType("ValueType", builtins.int)
V: typing_extensions.TypeAlias = ValueType
class _SubqueryTypeEnumTypeWrapper(
google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[
SubqueryExpression._SubqueryType.ValueType
],
builtins.type,
): # noqa: F821
DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor
SUBQUERY_TYPE_UNKNOWN: SubqueryExpression._SubqueryType.ValueType # 0
SUBQUERY_TYPE_SCALAR: SubqueryExpression._SubqueryType.ValueType # 1
SUBQUERY_TYPE_EXISTS: SubqueryExpression._SubqueryType.ValueType # 2
SUBQUERY_TYPE_TABLE_ARG: SubqueryExpression._SubqueryType.ValueType # 3
SUBQUERY_TYPE_IN: SubqueryExpression._SubqueryType.ValueType # 4
class SubqueryType(_SubqueryType, metaclass=_SubqueryTypeEnumTypeWrapper): ...
SUBQUERY_TYPE_UNKNOWN: SubqueryExpression.SubqueryType.ValueType # 0
SUBQUERY_TYPE_SCALAR: SubqueryExpression.SubqueryType.ValueType # 1
SUBQUERY_TYPE_EXISTS: SubqueryExpression.SubqueryType.ValueType # 2
SUBQUERY_TYPE_TABLE_ARG: SubqueryExpression.SubqueryType.ValueType # 3
SUBQUERY_TYPE_IN: SubqueryExpression.SubqueryType.ValueType # 4
class TableArgOptions(google.protobuf.message.Message):
"""Nested message for table argument options."""
DESCRIPTOR: google.protobuf.descriptor.Descriptor
PARTITION_SPEC_FIELD_NUMBER: builtins.int
ORDER_SPEC_FIELD_NUMBER: builtins.int
WITH_SINGLE_PARTITION_FIELD_NUMBER: builtins.int
@property
def partition_spec(
self,
) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[
global___Expression
]:
"""(Optional) The way that input rows are partitioned."""
@property
def order_spec(
self,
) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[
global___Expression.SortOrder
]:
"""(Optional) Ordering of rows in a partition."""
with_single_partition: builtins.bool
"""(Optional) Whether this is a single partition."""
def __init__(
self,
*,
partition_spec: collections.abc.Iterable[global___Expression] | None = ...,
order_spec: collections.abc.Iterable[global___Expression.SortOrder] | None = ...,
with_single_partition: builtins.bool | None = ...,
) -> None: ...
def HasField(
self,
field_name: typing_extensions.Literal[
"_with_single_partition",
b"_with_single_partition",
"with_single_partition",
b"with_single_partition",
],
) -> builtins.bool: ...
def ClearField(
self,
field_name: typing_extensions.Literal[
"_with_single_partition",
b"_with_single_partition",
"order_spec",
b"order_spec",
"partition_spec",
b"partition_spec",
"with_single_partition",
b"with_single_partition",
],
) -> None: ...
def WhichOneof(
self,
oneof_group: typing_extensions.Literal[
"_with_single_partition", b"_with_single_partition"
],
) -> typing_extensions.Literal["with_single_partition"] | None: ...
PLAN_ID_FIELD_NUMBER: builtins.int
SUBQUERY_TYPE_FIELD_NUMBER: builtins.int
TABLE_ARG_OPTIONS_FIELD_NUMBER: builtins.int
IN_SUBQUERY_VALUES_FIELD_NUMBER: builtins.int
plan_id: builtins.int
"""(Required) The ID of the corresponding connect plan."""
subquery_type: global___SubqueryExpression.SubqueryType.ValueType
"""(Required) The type of the subquery."""
@property
def table_arg_options(self) -> global___SubqueryExpression.TableArgOptions:
"""(Optional) Options specific to table arguments."""
@property
def in_subquery_values(
self,
) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Expression]:
"""(Optional) IN subquery values."""
def __init__(
self,
*,
plan_id: builtins.int = ...,
subquery_type: global___SubqueryExpression.SubqueryType.ValueType = ...,
table_arg_options: global___SubqueryExpression.TableArgOptions | None = ...,
in_subquery_values: collections.abc.Iterable[global___Expression] | None = ...,
) -> None: ...
def HasField(
self,
field_name: typing_extensions.Literal[
"_table_arg_options", b"_table_arg_options", "table_arg_options", b"table_arg_options"
],
) -> builtins.bool: ...
def ClearField(
self,
field_name: typing_extensions.Literal[
"_table_arg_options",
b"_table_arg_options",
"in_subquery_values",
b"in_subquery_values",
"plan_id",
b"plan_id",
"subquery_type",
b"subquery_type",
"table_arg_options",
b"table_arg_options",
],
) -> None: ...
def WhichOneof(
self, oneof_group: typing_extensions.Literal["_table_arg_options", b"_table_arg_options"]
) -> typing_extensions.Literal["table_arg_options"] | None: ...
global___SubqueryExpression = SubqueryExpression
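# A minimal sketch (comments only): an EXISTS subquery that refers to a
# previously transmitted plan by id; the id value is illustrative:
#
#   from pyspark.sql.connect.proto import expressions_pb2 as pb
#
#   sub = pb.SubqueryExpression(
#       plan_id=7,
#       subquery_type=pb.SubqueryExpression.SUBQUERY_TYPE_EXISTS,
#   )
#   expr = pb.Expression(subquery_expression=sub)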