diff --git a/ibis/backends/tests/test_export.py b/ibis/backends/tests/test_export.py
index 8d8af79a20b7b..7c05323062c8b 100644
--- a/ibis/backends/tests/test_export.py
+++ b/ibis/backends/tests/test_export.py
@@ -44,6 +44,19 @@
 limit_no_limit = limit + no_limit
 
 
+def test_table___arrow_c_stream__(awards_players):
+    if not hasattr(pa.Table, "__arrow_c_stream__"):
+        pytest.skip("Arrow PyCapsule interface not available in this pyarrow version")
+    # Smoketest for __arrow_c_stream__ interface
+    # Here we just plumb through to the underlying pyarrow implementation
+    obj = awards_players.__arrow_c_stream__()
+    assert obj is not None
+
+    schema = awards_players.schema().to_pyarrow().__arrow_c_schema__()
+    obj2 = awards_players.__arrow_c_stream__(schema)
+    assert obj2 is not None
+
+
 @pytest.mark.parametrize("limit", limit_no_limit)
 def test_table_to_pyarrow_batches(limit, awards_players):
     with awards_players.to_pyarrow_batches(limit=limit) as batch_reader:
diff --git a/ibis/expr/types/relations.py b/ibis/expr/types/relations.py
index 387a8a60a1d79..29838bfe323c2 100644
--- a/ibis/expr/types/relations.py
+++ b/ibis/expr/types/relations.py
@@ -188,6 +188,9 @@
     def __dataframe__(self, nan_as_null: bool = False, allow_copy: bool = True):
         return IbisDataFrame(self, nan_as_null=nan_as_null, allow_copy=allow_copy)
 
+    def __arrow_c_stream__(self, requested_schema: object | None = None) -> object:
+        return self.to_pyarrow().__arrow_c_stream__(requested_schema)
+
     def __pyarrow_result__(
         self, table: pa.Table, data_mapper: type[PyArrowData] | None = None
     ) -> pa.Table: