First pass at unblocking dask-expr issues - replace _Frame usage
charlesbluca committed Mar 22, 2024
1 parent 61513d3 commit 0fb485d
Showing 3 changed files with 11 additions and 11 deletions.
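
The change itself is mechanical: every check against the private `dd._Frame` base class becomes a check against the public `dd.DataFrame` collection class, which exists on both the legacy and the dask-expr-backed `dask.dataframe`. A minimal sketch of the before/after pattern (illustration only, not part of the commit):

```python
import pandas as pd
import dask.dataframe as dd

ddf = dd.from_pandas(pd.DataFrame({"x": [1, 2, 3]}), npartitions=1)

# Old check, legacy backend only: the private base class is not exposed
# when dask.dataframe is backed by dask-expr.
# isinstance(ddf, dd._Frame)

# New check used throughout this commit, valid on both backends:
assert isinstance(ddf, dd.DataFrame)
```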
6 changes: 3 additions & 3 deletions dask_sql/physical/rel/custom/wrappers.py
@@ -207,7 +207,7 @@ def transform(self, X):
estimator=self._postfit_estimator,
meta=output_meta,
)
-elif isinstance(X, dd._Frame):
+elif isinstance(X, dd.DataFrame):
if output_meta is None:
output_meta = _transform(X._meta_nonempty, self._postfit_estimator)
try:
@@ -305,7 +305,7 @@ def predict(self, X):
)
return result

-elif isinstance(X, dd._Frame):
+elif isinstance(X, dd.DataFrame):
if output_meta is None:
# dask-dataframe relies on dd.core.no_default
# for infering meta
@@ -364,7 +364,7 @@ def predict_proba(self, X):
meta=output_meta,
chunks=(X.chunks[0], len(self._postfit_estimator.classes_)),
)
-elif isinstance(X, dd._Frame):
+elif isinstance(X, dd.DataFrame):
if output_meta is None:
# dask-dataframe relies on dd.core.no_default
# for infering meta
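
For context, the `transform`, `predict`, and `predict_proba` branches touched above all share the same dispatch shape. A hedged sketch of that pattern, with `estimator` and `output_meta` standing in for `self._postfit_estimator` and the wrapper's cached meta (the array branch is simplified and the names are placeholders, not the module's actual helpers):

```python
import dask.array as da
import dask.dataframe as dd

def _transform_collection(X, estimator, output_meta=None):
    if isinstance(X, da.Array):
        # Array inputs: apply the estimator blockwise, forwarding meta if known.
        return X.map_blocks(estimator.transform, meta=output_meta)
    elif isinstance(X, dd.DataFrame):
        # DataFrame inputs: infer meta from a non-empty sample when not given,
        # then map the transform over partitions.
        if output_meta is None:
            output_meta = estimator.transform(X._meta_nonempty)
        return X.map_partitions(estimator.transform, meta=output_meta)
    # Anything else (e.g. a plain ndarray) is transformed eagerly.
    return estimator.transform(X)
```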
14 changes: 7 additions & 7 deletions dask_sql/physical/utils/filter.py
@@ -304,10 +304,10 @@ def combine(self, other: DNF | _And | _Or | list | tuple | None) -> DNF:
# Specify functions that must be generated with
# a different API at the dataframe-collection level
_special_op_mappings = {
-M.fillna: dd._Frame.fillna,
-M.isin: dd._Frame.isin,
-M.isna: dd._Frame.isna,
-M.astype: dd._Frame.astype,
+M.fillna: dd.DataFrame.fillna,
+M.isin: dd.DataFrame.isin,
+M.isna: dd.DataFrame.isna,
+M.astype: dd.DataFrame.astype,
}

# Convert _pass_through_ops to respect "special" mappings
@@ -316,7 +316,7 @@ def combine(self, other: DNF | _And | _Or | list | tuple | None) -> DNF:

def _preprocess_layers(input_layers):
# NOTE: This is a Layer-specific work-around to deal with
-# the fact that `dd._Frame.isin(values)` will add a distinct
+# the fact that `dd.DataFrame.isin(values)` will add a distinct
# `MaterializedLayer` for the `values` argument.
# See: https://github.com/dask-contrib/dask-sql/issues/607
skip = set()
@@ -418,9 +418,9 @@ def _dnf_filter_expression(self, dsk):
func = _blockwise_logical_dnf
elif op == operator.getitem:
func = _blockwise_getitem_dnf
-elif op == dd._Frame.isin:
+elif op == dd.DataFrame.isin:
func = _blockwise_isin_dnf
-elif op == dd._Frame.isna:
+elif op == dd.DataFrame.isna:
func = _blockwise_isna_dnf
elif op == operator.inv:
func = _blockwise_inv_dnf
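
The values in `_special_op_mappings` are unbound `dd.DataFrame` methods, i.e. plain functions that take the collection as their first argument, so they can be stored in the mapping and compared against later (as in `_dnf_filter_expression`). A small illustration, not taken from the diff:

```python
import pandas as pd
import dask.dataframe as dd

ddf = dd.from_pandas(pd.DataFrame({"x": [1.0, None, 3.0]}), npartitions=1)

op = dd.DataFrame.isna      # the same object the mapping stores
print(op(ddf).compute())    # equivalent to ddf.isna().compute()
```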
2 changes: 1 addition & 1 deletion tests/unit/test_ml_utils.py
@@ -98,7 +98,7 @@ def make_classification(

def _assert_eq(l, r, name=None, **kwargs):
array_types = (np.ndarray, da.Array)
-frame_types = (pd.core.generic.NDFrame, dd._Frame)
+frame_types = (pd.core.generic.NDFrame, dd.DataFrame)
if isinstance(l, array_types):
assert_eq_ar(l, r, **kwargs)
elif isinstance(l, frame_types):
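
For reference, a self-contained reading of the test helper after this change. The `assert_eq_ar` and `assert_eq_df` aliases are assumed to point at dask's own comparison utilities, and the trailing fallback branch is an assumption since the diff is truncated:

```python
import numpy as np
import pandas as pd
import dask.array as da
import dask.dataframe as dd
from dask.array.utils import assert_eq as assert_eq_ar
from dask.dataframe.utils import assert_eq as assert_eq_df

def _assert_eq(l, r, name=None, **kwargs):
    array_types = (np.ndarray, da.Array)
    frame_types = (pd.core.generic.NDFrame, dd.DataFrame)
    if isinstance(l, array_types):
        assert_eq_ar(l, r, **kwargs)
    elif isinstance(l, frame_types):
        assert_eq_df(l, r, **kwargs)
    else:
        # Assumed fallback: plain equality for scalars and other objects.
        assert l == r, name
```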
