Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[FLINK-37365][pyflink] Add API stability decorators for PyFlink APIs #26247

Open
wants to merge 1 commit into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
22 changes: 22 additions & 0 deletions flink-python/pyflink/table/catalog.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,7 @@
from pyflink.util.java_utils import to_jarray
from typing import Dict, List, Optional, Union
from abc import ABCMeta, abstractmethod
from pyflink.util.api_stability_decorators import PublicEvolving

__all__ = ['Catalog', 'CatalogDatabase', 'CatalogBaseTable', 'CatalogPartition', 'CatalogFunction',
'Procedure', 'ObjectPath', 'CatalogPartitionSpec', 'CatalogTableStatistics',
Expand All @@ -36,6 +37,7 @@
'Constraint', 'UniqueConstraint', 'ResolvedSchema']


@PublicEvolving()
Copy link
Contributor

@snuyanzin snuyanzin Mar 13, 2025

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Great that there is @PublicEvolving()
out of curiosity: is there a way to have a kind of test comparing java/scala classes annotated with @PublicEvolving and similar entities in python, failing if there is something annotated in java/scala and not annotated in python and vice versa.
Sure not under this PR, just in general as an idea...

class Catalog(object):
"""
Catalog is responsible for reading and writing metadata such as database/table/views/UDFs
Expand Down Expand Up @@ -733,6 +735,7 @@ def alter_partition_column_statistics(self,
ignore_if_not_exists)


@PublicEvolving()
class CatalogDatabase(object):
"""
Represents a database object in a catalog.
Expand Down Expand Up @@ -809,6 +812,7 @@ def get_detailed_description(self) -> Optional[str]:
return None


@PublicEvolving()
class CatalogBaseTable(object):
"""
CatalogBaseTable is the common parent of table and view. It has a map of
Expand Down Expand Up @@ -943,6 +947,7 @@ def get_detailed_description(self) -> Optional[str]:
return None


@PublicEvolving()
class CatalogPartition(object):
"""
Represents a partition object in catalog.
Expand Down Expand Up @@ -1022,6 +1027,7 @@ def get_comment(self) -> str:
return self._j_catalog_partition.getComment()


@PublicEvolving()
class CatalogFunction(object):
"""
Interface for a function in a catalog.
Expand Down Expand Up @@ -1113,6 +1119,7 @@ def get_function_language(self):
return self._j_catalog_function.getFunctionLanguage()


@PublicEvolving()
class CatalogModel(object):
"""
Interface for a model in a catalog.
Expand Down Expand Up @@ -1176,6 +1183,7 @@ def get_options(self):
return dict(self._j_catalog_model.getOptions())


@PublicEvolving()
class Procedure(object):
"""
Interface for a procedure in a catalog.
Expand All @@ -1189,6 +1197,7 @@ def _get(j_procedure):
return Procedure(j_procedure)


@PublicEvolving()
class ObjectPath(object):
"""
A database name and object (table/view/function) name combo in a catalog.
Expand Down Expand Up @@ -1226,6 +1235,7 @@ def from_string(full_name: str) -> 'ObjectPath':
return ObjectPath(j_object_path=gateway.jvm.ObjectPath.fromString(full_name))


@PublicEvolving()
class CatalogPartitionSpec(object):
"""
Represents a partition spec object in catalog.
Expand Down Expand Up @@ -1259,6 +1269,7 @@ def get_partition_spec(self) -> Dict[str, str]:
return dict(self._j_catalog_partition_spec.getPartitionSpec())


@PublicEvolving()
class CatalogTableStatistics(object):
"""
Statistics for a non-partitioned table or a partition of a partitioned table.
Expand Down Expand Up @@ -1313,6 +1324,7 @@ def copy(self) -> 'CatalogTableStatistics':
j_catalog_table_statistics=self._j_catalog_table_statistics.copy())


@PublicEvolving()
class CatalogColumnStatistics(object):
"""
Column statistics of a table or partition.
Expand Down Expand Up @@ -1379,6 +1391,7 @@ def __init__(self, catalog_name: str, default_database: str, username: str, pwd:
super(JdbcCatalog, self).__init__(j_jdbc_catalog)


@PublicEvolving()
class CatalogDescriptor:
"""
Describes a catalog with the catalog name and configuration.
Expand Down Expand Up @@ -1483,6 +1496,7 @@ def as_summary_string(self) -> str:
return self._j_object_identifier.asSummaryString()


@PublicEvolving()
class Column(metaclass=ABCMeta):
"""
Representation of a column in a :class:`ResolvedSchema`.
Expand Down Expand Up @@ -1642,6 +1656,7 @@ def rename(self, new_name: str) -> "Column":
pass


@PublicEvolving()
class PhysicalColumn(Column):
"""
Representation of a physical column.
Expand Down Expand Up @@ -1670,6 +1685,7 @@ def rename(self, new_name: str) -> Column:
return self._j_physical_column.rename(new_name)


@PublicEvolving()
class ComputedColumn(Column):
"""
Representation of a computed column.
Expand Down Expand Up @@ -1710,6 +1726,7 @@ def rename(self, new_name: str) -> Column:
return self._j_computed_column.rename(new_name)


@PublicEvolving()
class MetadataColumn(Column):
"""
Representation of a metadata column.
Expand Down Expand Up @@ -1754,6 +1771,7 @@ def rename(self, new_name: str) -> Column:
return self._j_metadata_column.rename(new_name)


@PublicEvolving()
class WatermarkSpec:
"""
Representation of a watermark specification in :class:`ResolvedSchema`.
Expand Down Expand Up @@ -1811,6 +1829,7 @@ def as_summary_string(self) -> str:
return self._j_watermark_spec.asSummaryString()


@PublicEvolving()
class Constraint(metaclass=ABCMeta):
"""
Integrity constraints, generally referred to simply as constraints, define the valid states of
Expand Down Expand Up @@ -1849,6 +1868,7 @@ def as_summary_string(self) -> str:
"""
return self._j_constraint.asSummaryString()

@PublicEvolving()
class ConstraintType(Enum):
"""
Type of the constraint.
Expand All @@ -1866,6 +1886,7 @@ class ConstraintType(Enum):
UNIQUE_KEY = 1


@PublicEvolving()
class UniqueConstraint(Constraint):
"""
A unique key constraint. It can be declared also as a PRIMARY KEY.
Expand Down Expand Up @@ -1899,6 +1920,7 @@ def get_type_string(self) -> str:
return self._j_unique_constraint.getTypeString()


@PublicEvolving()
class ResolvedSchema(object):
"""
Schema of a table or view consisting of columns, constraints, and watermark specifications.
Expand Down
12 changes: 12 additions & 0 deletions flink-python/pyflink/table/changelog_mode.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,10 +16,12 @@
# limitations under the License.
################################################################################
from pyflink.java_gateway import get_gateway
from pyflink.util.api_stability_decorators import PublicEvolving

__all__ = ['ChangelogMode']


@PublicEvolving()
class ChangelogMode(object):
"""
The set of changes contained in a changelog.
Expand All @@ -30,18 +32,28 @@ def __init__(self, j_changelog_mode):

@staticmethod
def insert_only():
    """
    Shortcut for a simple :attr:`~pyflink.common.RowKind.INSERT`-only changelog.
    """
    # Delegate to the Java-side factory and wrap the resulting Java object.
    j_changelog_mode = get_gateway().jvm \
        .org.apache.flink.table.connector.ChangelogMode.insertOnly()
    return ChangelogMode(j_changelog_mode)

@staticmethod
def upsert():
    """
    Shortcut for an upsert changelog that describes idempotent updates on a key and thus
    does not contain :attr:`~pyflink.common.RowKind.UPDATE_BEFORE` rows.
    """
    # Fetch the JVM gateway and wrap the Java-side ChangelogMode singleton.
    gateway = get_gateway()
    return ChangelogMode(
        gateway.jvm.org.apache.flink.table.connector.ChangelogMode.upsert())

@staticmethod
def all():
    """
    Shortcut for a changelog that can contain all :class:`~pyflink.common.RowKind`.
    """
    # Delegate to the Java-side factory and wrap the resulting Java object.
    j_changelog_mode = get_gateway().jvm \
        .org.apache.flink.table.connector.ChangelogMode.all()
    return ChangelogMode(j_changelog_mode)
5 changes: 5 additions & 0 deletions flink-python/pyflink/table/data_view.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,13 +18,16 @@
from abc import ABC, abstractmethod
from typing import TypeVar, Generic, Iterable, List, Any, Iterator, Dict, Tuple

from pyflink.util.api_stability_decorators import PublicEvolving

T = TypeVar('T')
K = TypeVar('K')
V = TypeVar('V')

__all__ = ['DataView', 'ListView', 'MapView']


@PublicEvolving()
class DataView(ABC):
"""
A DataView is a collection type that can be used in the accumulator of an user defined
Expand All @@ -40,6 +43,7 @@ def clear(self) -> None:
pass


@PublicEvolving()
class ListView(DataView, Generic[T]):
"""
A :class:`DataView` that provides list-like functionality in the accumulator of an
Expand Down Expand Up @@ -102,6 +106,7 @@ def __iter__(self) -> Iterator[T]:
return iter(self.get())


@PublicEvolving()
class MapView(Generic[K, V]):
"""
A :class:`DataView` that provides dict-like functionality in the accumulator of an
Expand Down
2 changes: 2 additions & 0 deletions flink-python/pyflink/table/environment_settings.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,13 +17,15 @@
################################################################################

from pyflink.java_gateway import get_gateway
from pyflink.util.api_stability_decorators import PublicEvolving
from pyflink.util.java_utils import create_url_class_loader

from pyflink.common import Configuration

__all__ = ['EnvironmentSettings']


@PublicEvolving()
class EnvironmentSettings(object):
"""
Defines all parameters that initialize a table environment. Those parameters are used only
Expand Down
3 changes: 3 additions & 0 deletions flink-python/pyflink/table/explain_detail.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,10 @@

__all__ = ['ExplainDetail']

from pyflink.util.api_stability_decorators import PublicEvolving


@PublicEvolving()
class ExplainDetail(object):
"""
ExplainDetail defines the types of details for explain result.
Expand Down
5 changes: 5 additions & 0 deletions flink-python/pyflink/table/module.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,10 @@

__all__ = ['HiveModule', 'Module', 'ModuleEntry']

from pyflink.util.api_stability_decorators import PublicEvolving


@PublicEvolving()
class Module(object):
"""
Modules define a set of metadata, including functions, user defined types, operators, rules,
Expand All @@ -34,6 +37,7 @@ def __init__(self, j_module):
self._j_module = j_module


@PublicEvolving()
class HiveModule(Module):
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Not sure why this is here. The Hive module got factored out of the Flink core repository? Maybe a followup issue to remove it? Maybe don't annotate it?

"""
Module to provide Hive built-in metadata.
Expand All @@ -51,6 +55,7 @@ def __init__(self, hive_version: str = None):
super(HiveModule, self).__init__(j_hive_module)


@PublicEvolving()
class ModuleEntry(object):
"""
A POJO to represent a module's name and use status.
Expand Down
2 changes: 2 additions & 0 deletions flink-python/pyflink/table/resolved_expression.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,10 +19,12 @@

from pyflink.table import Expression
from pyflink.table.types import DataType, _from_java_data_type
from pyflink.util.api_stability_decorators import PublicEvolving

__all__ = ["ResolvedExpression"]


@PublicEvolving()
class ResolvedExpression(Expression):
"""
Expression that has been fully resolved and validated.
Expand Down
3 changes: 3 additions & 0 deletions flink-python/pyflink/table/result_kind.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,10 @@

__all__ = ['ResultKind']

from pyflink.util.api_stability_decorators import PublicEvolving


@PublicEvolving()
class ResultKind(object):
"""
ResultKind defines the types of the result.
Expand Down
3 changes: 3 additions & 0 deletions flink-python/pyflink/table/schema.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@
from pyflink.table import Expression
from pyflink.table.expression import _get_java_expression
from pyflink.table.types import DataType, _to_java_data_type
from pyflink.util.api_stability_decorators import PublicEvolving
from pyflink.util.java_utils import to_jarray

if TYPE_CHECKING:
Expand All @@ -31,6 +32,7 @@
__all__ = ['Schema']


@PublicEvolving()
class Schema(object):
"""
Schema of a table or view.
Expand Down Expand Up @@ -67,6 +69,7 @@ def __eq__(self, other):
def __hash__(self):
return self._j_schema.hashCode()

@PublicEvolving()
class Builder(object):
"""
A builder for constructing an immutable but still unresolved Schema.
Expand Down
2 changes: 2 additions & 0 deletions flink-python/pyflink/table/sql_dialect.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,10 +16,12 @@
# limitations under the License.
################################################################################
from pyflink.java_gateway import get_gateway
from pyflink.util.api_stability_decorators import PublicEvolving

__all__ = ['SqlDialect']


@PublicEvolving()
class SqlDialect(object):
"""
Enumeration of valid SQL compatibility modes.
Expand Down
2 changes: 2 additions & 0 deletions flink-python/pyflink/table/statement_set.py
Original file line number Diff line number Diff line change
Expand Up @@ -22,11 +22,13 @@
from pyflink.table.table_descriptor import TableDescriptor
from pyflink.table.table_pipeline import TablePipeline
from pyflink.table.table_result import TableResult
from pyflink.util.api_stability_decorators import PublicEvolving
from pyflink.util.java_utils import to_j_explain_detail_arr

__all__ = ['StatementSet']


@PublicEvolving()
class StatementSet(object):
"""
A :class:`~StatementSet` accepts pipelines defined by DML statements or :class:`~Table` objects.
Expand Down
Loading