feat(python): Implement Arrow PyCapsule Interface for Series/DataFrame export #17676
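In rough terms, this PR gives `Series` and `DataFrame` an `__arrow_c_stream__` dunder so any consumer of the Arrow PyCapsule Interface can ingest Polars data without Polars-specific glue. A minimal consumer-side sketch, assuming a pyarrow version that checks for the dunder on arbitrary objects (which the tests below rely on):

```python
import polars as pl
import pyarrow as pa

df = pl.DataFrame({"a": [1, 2, 3], "b": ["x", "y", "z"]})
s = pl.Series("a", [1, 2, 3, None])

# pyarrow discovers the data through __arrow_c_stream__ (an Arrow C stream
# wrapped in a PyCapsule) and imports it as native Arrow structures.
table = pa.table(df)           # -> pyarrow.Table
chunked = pa.chunked_array(s)  # -> pyarrow.ChunkedArray
```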
@@ -1,9 +1,16 @@

```rust
use std::ffi::CString;
use arrow::ffi;
use arrow::record_batch::RecordBatch;
use polars::datatypes::CompatLevel;
use polars::frame::DataFrame;
use polars::prelude::{ArrayRef, ArrowField};
use polars::series::Series;
use polars_core::utils::arrow;
use polars_error::PolarsResult;
use pyo3::ffi::Py_uintptr_t;
use pyo3::prelude::*;
use pyo3::types::PyCapsule;

/// Arrow array to Python.
pub(crate) fn to_py_array(
```
@@ -49,3 +56,74 @@ pub(crate) fn to_py_rb(

```rust
    Ok(record.to_object(py))
}

/// Export a series to a C stream via a PyCapsule according to the Arrow PyCapsule Interface
/// https://arrow.apache.org/docs/dev/format/CDataInterface/PyCapsuleInterface.html
pub(crate) fn series_to_stream<'py>(
    series: &'py Series,
    py: Python<'py>,
) -> PyResult<Bound<'py, PyCapsule>> {
    let field = series.field().to_arrow(CompatLevel::oldest());
```
> Review thread on this line:
>
> - I do think this should be […]
> - Why […]
> - Does the protocol allow for this?
> - https://arrow.apache.org/docs/dev/format/CDataInterface/PyCapsuleInterface.html#schema-requests
> - Right, then I agree.
> - There's been discussion about this in apache/arrow#39689. To be able to pass in a […] I believe I summarized the consensus in apache/arrow#39689 (comment), but while waiting for confirmation, I think it would be best for us to leave […]
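Per the schema-requests section linked above, `requested_schema` is only a best-effort hint, so a producer may export a different (but compatible) schema — here, whatever `CompatLevel::oldest()` yields. A rough consumer-side sketch of what that implies; the target schema and the cast step are illustrative, not part of this diff:

```python
import polars as pl
import pyarrow as pa

df = pl.DataFrame({"a": [1, 2, 3]})

# Import with whatever schema the producer chose to export
# (pyarrow picks this up via __arrow_c_stream__ or another interchange path)...
table = pa.table(df)

# ...and cast on the consumer side if an exact schema is required.
target = pa.schema([("a", pa.int32())])  # hypothetical target schema
table = table.cast(target)
```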
```rust
    let iter = Box::new(series.chunks().clone().into_iter().map(Ok)) as _;
    let stream = ffi::export_iterator(iter, field);
    let stream_capsule_name = CString::new("arrow_array_stream").unwrap();
    PyCapsule::new_bound(py, stream, Some(stream_capsule_name))
}

pub(crate) fn dataframe_to_stream(df: DataFrame, py: Python) -> PyResult<Bound<'_, PyCapsule>> {
    let iter = Box::new(DataFrameStreamIterator::new(df));
    let field = iter.field();
    let stream = ffi::export_iterator(iter, field);
    let stream_capsule_name = CString::new("arrow_array_stream").unwrap();
    PyCapsule::new_bound(py, stream, Some(stream_capsule_name))
}
```
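A rough sketch of what a Python consumer sees from `dataframe_to_stream`: the stream yields one struct array per chunk of the `DataFrame`, which pyarrow reassembles into record batches. The holder class mirrors the tests further down; the chunk-count observation is an assumption about how pyarrow materializes the stream, not something asserted in this diff:

```python
import polars as pl
import pyarrow as pa


class _StreamHolder:  # illustrative helper, not part of the PR
    def __init__(self, capsule: object) -> None:
        self.capsule = capsule

    def __arrow_c_stream__(self, requested_schema: object = None) -> object:
        return self.capsule


df = pl.concat(
    [
        pl.DataFrame({"a": [1, 2], "b": ["x", "y"]}),
        pl.DataFrame({"a": [3], "b": ["z"]}),
    ],
    rechunk=False,
)  # two chunks per column

table = pa.table(_StreamHolder(df.__arrow_c_stream__(None)))
print(table.schema)                  # columns "a" and "b"
print(table.column("a").num_chunks)  # typically one chunk per Polars chunk
```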
```rust
pub struct DataFrameStreamIterator {
    columns: Vec<polars::series::Series>,
    data_type: arrow::datatypes::ArrowDataType,
    idx: usize,
    n_chunks: usize,
}

impl DataFrameStreamIterator {
    fn new(df: polars::frame::DataFrame) -> Self {
        let schema = df.schema().to_arrow(CompatLevel::oldest());
        let data_type = arrow::datatypes::ArrowDataType::Struct(schema.fields);

        Self {
            columns: df.get_columns().to_vec(),
            data_type,
            idx: 0,
            n_chunks: df.n_chunks(),
        }
    }

    fn field(&self) -> ArrowField {
        ArrowField::new("", self.data_type.clone(), false)
    }
}

impl Iterator for DataFrameStreamIterator {
    type Item = PolarsResult<Box<dyn arrow::array::Array>>;

    fn next(&mut self) -> Option<Self::Item> {
        if self.idx >= self.n_chunks {
            None
        } else {
            // create a batch of the columns with the same chunk no.
            let batch_cols = self
                .columns
                .iter()
                .map(|s| s.to_arrow(self.idx, CompatLevel::oldest()))
                .collect();
            self.idx += 1;

            let array = arrow::array::StructArray::new(
                self.data_type.clone(),
                batch_cols,
                std::option::Option::None,
            );
            Some(std::result::Result::Ok(Box::new(array)))
        }
    }
}
```
@@ -749,3 +749,32 @@ def test_compat_level(monkeypatch: pytest.MonkeyPatch) -> None:

```python
    assert len(df.write_ipc_stream(None).getbuffer()) == 544
    assert len(df.write_ipc_stream(None, compat_level=oldest).getbuffer()) == 672
    assert len(df.write_ipc_stream(None, compat_level=newest).getbuffer()) == 544


def test_df_pycapsule_interface() -> None:
    class PyCapsuleStreamHolder:
        """
        Hold the Arrow C Stream pycapsule.

        A class that exposes _only_ the Arrow C Stream interface via Arrow PyCapsules.
        This ensures that pyarrow is seeing _only_ the `__arrow_c_stream__` dunder, and
        that nothing else (e.g. the dataframe or array interface) is actually being
        used.
        """

        capsule: object

        def __init__(self, capsule: object) -> None:
            self.capsule = capsule

        def __arrow_c_stream__(self, requested_schema: object) -> object:
            return self.capsule

    df = pl.DataFrame({"a": [1, 2, 3], "b": ["a", "b", "c"]})
    out = pa.table(PyCapsuleStreamHolder(df.__arrow_c_stream__(None)))
    assert df.shape == out.shape
    assert df.schema.names() == out.schema.names
```
> Review thread:
>
> - You could drop […]
> - I updated the test to not hold a bare capsule, but rather call the underlying object's […]
```python
    df2 = pl.from_arrow(out)
    assert isinstance(df2, pl.DataFrame)
    assert df.equals(df2)
```
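The review thread above mentions not holding a bare capsule but calling through to the underlying object. A minimal sketch of that forwarding variant, assuming the elided call is the wrapped object's own `__arrow_c_stream__` (the comment is truncated, so the class name and forwarded method here are assumptions):

```python
class ForwardingStreamHolder:
    """Keeps the wrapped object and produces the capsule only on request."""

    def __init__(self, obj: object) -> None:
        self.obj = obj

    def __arrow_c_stream__(self, requested_schema: object = None) -> object:
        # Forward to the wrapped object's dunder (assumed), so the capsule
        # is created lazily, when the consumer actually asks for the stream.
        return self.obj.__arrow_c_stream__(requested_schema)
```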
@@ -628,6 +628,31 @@ def test_arrow() -> None:

```python
    )


def test_pycapsule_interface() -> None:
    class PyCapsuleSeriesHolder:
        """
        Hold the Arrow C Stream pycapsule.

        A class that exposes _only_ the Arrow C Stream interface via Arrow PyCapsules.
        This ensures that pyarrow is seeing _only_ the `__arrow_c_stream__` dunder, and
        that nothing else (e.g. the dataframe or array interface) is actually being
        used.
        """

        capsule: object

        def __init__(self, capsule: object):
            self.capsule = capsule

        def __arrow_c_stream__(self, requested_schema: object) -> object:
            return self.capsule

    a = pl.Series("a", [1, 2, 3, None])
    out = pa.chunked_array(PyCapsuleSeriesHolder(a.__arrow_c_stream__(None)))
    out_arr = out.combine_chunks()
```
> Review comment: Same idea here (drop […])
```python
    assert out_arr == pa.array([1, 2, 3, None])


def test_get() -> None:
    a = pl.Series("a", [1, 2, 3])
    pos_idxs = pl.Series("idxs", [2, 0, 1, 0], dtype=pl.Int8)
```
> Review comment: Arrow-rs also implements this: https://github.com/apache/arrow-rs/blob/6d4e2f2ceaf423031b0bc72f54c547dd77a0ddbb/arrow-array/src/ffi_stream.rs#L100