
Commit 8f1b241

Clean up tests after legacy DataFrame removal (#8972)
1 parent fd6149e commit 8f1b241

File tree

4 files changed: 8 additions, 28 deletions


distributed/protocol/tests/test_highlevelgraph.py

+3 -11

@@ -1,7 +1,5 @@
 from __future__ import annotations
 
-import contextlib
-
 import pytest
 
 np = pytest.importorskip("numpy")
@@ -175,11 +173,9 @@ async def test_dataframe_annotations(c, s, a, b):
     acol = df["a"]
     bcol = df["b"]
 
-    ctx = contextlib.nullcontext()
-    if dd._dask_expr_enabled():
-        ctx = pytest.warns(
-            UserWarning, match="Annotations will be ignored when using query-planning"
-        )
+    ctx = pytest.warns(
+        UserWarning, match="Annotations will be ignored when using query-planning"
+    )
 
     with dask.annotate(retries=retries), ctx:
         df = acol + bcol
@@ -189,7 +185,3 @@ async def test_dataframe_annotations(c, s, a, b):
 
     assert rdf.dtypes == np.float64
     assert (rdf == 10.0).all()
-
-    if not dd._dask_expr_enabled():
-        # There is an annotation match per partition (i.e. task)
-        assert plugin.retry_matches == df.npartitions
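
A minimal sketch of the pattern the simplified test now relies on unconditionally: pytest.warns used as a context manager that fails unless a matching UserWarning is emitted inside the block. The helper function below is a hypothetical stand-in, not code from this repository.

import warnings

import pytest


def emit_query_planning_warning():
    # Hypothetical stand-in for the warning dask-expr emits when annotations
    # are ignored under query planning.
    warnings.warn(
        "Annotations will be ignored when using query-planning", UserWarning
    )


def test_warns_on_ignored_annotations():
    # pytest.warns fails the test if no UserWarning matching the regex is raised.
    with pytest.warns(UserWarning, match="Annotations will be ignored"):
        emit_query_planning_warning()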

distributed/shuffle/tests/test_merge.py

+3 -6

@@ -56,13 +56,10 @@ async def test_basic_merge(c, s, a, b, how):
 
     joined = a.merge(b, left_on="y", right_on="y", how=how)
 
-    if dd._dask_expr_enabled():
-        # Ensure we're using a hash join
-        from dask_expr._merge import HashJoinP2P
+    # Ensure we're using a hash join
+    from dask_expr._merge import HashJoinP2P
 
-        assert any(
-            isinstance(expr, HashJoinP2P) for expr in joined.optimize()._expr.walk()
-        )
+    assert any(isinstance(expr, HashJoinP2P) for expr in joined.optimize()._expr.walk())
 
     expected = pd.merge(A, B, how, "y")
     await list_eq(joined, expected)
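
The assertion above walks the optimized expression tree and checks that at least one node is a HashJoinP2P. A minimal sketch of that shape, using hypothetical toy classes rather than the dask-expr API:

from __future__ import annotations

from dataclasses import dataclass, field


@dataclass
class Expr:
    # Toy expression node; walk() yields the node and all of its descendants.
    children: list[Expr] = field(default_factory=list)

    def walk(self):
        yield self
        for child in self.children:
            yield from child.walk()


@dataclass
class HashJoin(Expr):
    pass


tree = Expr(children=[Expr(), HashJoin()])
assert any(isinstance(expr, HashJoin) for expr in tree.walk())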

distributed/shuffle/tests/test_shuffle.py

+1 -3

@@ -1637,9 +1637,7 @@ async def test_multi(c, s, a, b):
     await assert_scheduler_cleanup(s)
 
 
-@pytest.mark.skipif(
-    dd._dask_expr_enabled(), reason="worker restrictions are not supported in dask-expr"
-)
+@pytest.mark.skipif(reason="worker restrictions are not supported in dask-expr")
 @gen_cluster(client=True)
 async def test_restrictions(c, s, a, b):
     df = dask.datasets.timeseries(
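
For reference, a minimal sketch of the marker left behind by this change, assuming pytest's behavior of treating a skipif mark without a condition as an unconditional skip; pytest.mark.skip spells the same intent more directly. Neither test function exists in the repository.

import pytest


@pytest.mark.skipif(reason="worker restrictions are not supported in dask-expr")
def test_skipped_via_condition_less_skipif():
    raise AssertionError("never runs")


@pytest.mark.skip(reason="worker restrictions are not supported in dask-expr")
def test_skipped_via_skip():
    raise AssertionError("never runs")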

distributed/shuffle/tests/utils.py

+1 -8

@@ -6,14 +6,7 @@
 from distributed.core import PooledRPCCall
 from distributed.shuffle._core import ShuffleId, ShuffleRun
 
-UNPACK_PREFIX = "shuffle_p2p"
-try:
-    import dask.dataframe as dd
-
-    if dd._dask_expr_enabled():
-        UNPACK_PREFIX = "p2pshuffle"
-except ImportError:
-    pass
+UNPACK_PREFIX = "p2pshuffle"
 
 
 class PooledRPCShuffle(PooledRPCCall):
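
A minimal sketch of how a fixed key-prefix constant such as UNPACK_PREFIX can be used to pick out matching tasks; the task keys below are hypothetical, not taken from the test suite.

UNPACK_PREFIX = "p2pshuffle"

# Hypothetical (name, partition) task keys.
task_keys = [
    ("p2pshuffle-1234abcd", 0),
    ("p2pshuffle-1234abcd", 1),
    ("assign-5678ef90", 0),
]

unpack_keys = [key for key in task_keys if key[0].startswith(UNPACK_PREFIX)]
assert len(unpack_keys) == 2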
