Skip to content

Commit 1e9d697

Browse files
Purge dask-expr (#1648)
1 parent 615ba99 commit 1e9d697

File tree

11 files changed

+5
-17
lines changed

11 files changed

+5
-17
lines changed

AB_environments/AB_baseline.conda.yaml

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -14,7 +14,6 @@ dependencies:
1414
- pandas ==2.2.3
1515
- dask ==2024.11.2
1616
- distributed ==2024.11.2
17-
- dask-expr ==1.1.19
1817
- dask-labextension ==7.0.0
1918
- dask-ml ==2024.4.4
2019
- fsspec ==2024.10.0
@@ -68,4 +67,3 @@ dependencies:
6867
# Read README.md for troubleshooting.
6968
# - git+https://github.com/dask/dask@191d39177009d2cce25b818878118e35329b6db3
7069
# - git+https://github.com/dask/distributed@0304fb6e665e36abf9e3086173cccd36e29ae84d
71-
# - git+https://github.com/dask-contrib/dask-expr@9f765764da3f518ddd4c896c98b8a40a979a5553

AB_environments/AB_sample.conda.yaml

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,6 @@ dependencies:
2020
- pandas ==2.2.3
2121
- dask ==2024.11.2
2222
- distributed ==2024.11.2
23-
- dask-expr ==1.1.19
2423
- dask-labextension ==7.0.0
2524
- dask-ml ==2024.4.4
2625
- fsspec ==2024.10.0
@@ -74,4 +73,3 @@ dependencies:
7473
# Read README.md for troubleshooting.
7574
- git+https://github.com/dask/dask@191d39177009d2cce25b818878118e35329b6db3
7675
- git+https://github.com/dask/distributed@0304fb6e665e36abf9e3086173cccd36e29ae84d
77-
- git+https://github.com/dask-contrib/dask-expr@9f765764da3f518ddd4c896c98b8a40a979a5553

AB_environments/README.md

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -222,7 +222,6 @@ As a handy copy-paste to run from the root dir of this repository:
222222
```bash
223223
pushd ../dask && git fetch upstream --tags && git push origin --tags && popd
224224
pushd ../distributed && git fetch upstream --tags && git push origin --tags && popd
225-
pushd ../dask-expr && git fetch upstream --tags && git push origin --tags && popd
226225
```
227226

228227
#### Problem:

ci/environment-dashboard.yml

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -14,7 +14,6 @@ dependencies:
1414
- coiled
1515
- conda
1616
- dask
17-
- dask-expr
1817
- dask-ml
1918
- distributed
2019
- filelock

ci/environment-git-tip.yml

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -7,4 +7,3 @@ dependencies:
77
- git+https://github.com/dask/dask
88
- git+https://github.com/dask/distributed
99
- git+https://github.com/dask/zict
10-
- git+https://github.com/dask-contrib/dask-expr

ci/environment.yml

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -15,7 +15,6 @@ dependencies:
1515
- pandas ==2.2.3
1616
- dask ==2024.11.2
1717
- distributed ==2024.11.2
18-
- dask-expr ==1.1.19
1918
- dask-labextension ==7.0.0
2019
- dask-ml ==2024.4.4
2120
- fsspec ==2024.10.0

tests/benchmarks/test_join.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -24,7 +24,7 @@ def test_join_big(small_client, memory_multiplier):
2424
df2_big = df2_big.astype({"predicate": "int"})
2525

2626
join = df1_big.merge(df2_big, on="predicate", how="inner")
27-
# dask-expr will drop all columns except the Index for size
27+
# dask.dataframe will drop all columns except the Index for size
2828
# computations, which will optimize itself through merges, e.g.
2929
# shuffling a lot less data than what we want to test
3030
# map_partitions blocks those optimizations
@@ -55,7 +55,7 @@ def test_join_big_small(small_client, memory_multiplier, configure_shuffling):
5555
df_small_pd = df_small.astype({"predicate": "int"}).compute()
5656

5757
join = df_big.merge(df_small_pd, on="predicate", how="inner")
58-
# dask-expr will drop all columns except the Index for size
58+
# dask.dataframe will drop all columns except the Index for size
5959
# computations, which will optimize itself through merges, e.g.
6060
# shuffling a lot less data than what we want to test
6161
# map_partitions blocks those optimizations
@@ -77,7 +77,7 @@ def test_set_index(small_client, persist, memory_multiplier):
7777
if persist:
7878
df_big = df_big.persist()
7979
df_indexed = df_big.set_index("0")
80-
# dask-expr will drop all columns except the Index for size
80+
# dask.dataframe will drop all columns except the Index for size
8181
# computations, which will optimize itself through set_index, e.g.
8282
# shuffling a lot less data than what we want to test
8383
# map_partitions blocks those optimizations

tests/conftest.py

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,6 @@
1717
import adlfs
1818
import dask
1919
import dask.array as da
20-
import dask_expr
2120
import distributed
2221
import filelock
2322
import gcsfs
@@ -174,7 +173,6 @@ def database_table_schema(request, testrun_uid):
174173
originalname=request.node.originalname,
175174
path=str(request.node.path.relative_to(TEST_DIR)),
176175
dask_version=dask.__version__,
177-
dask_expr_version=dask_expr.__version__,
178176
distributed_version=distributed.__version__,
179177
coiled_runtime_version=os.environ.get("AB_VERSION", "upstream"),
180178
coiled_software_name=COILED_SOFTWARE_NAME,

tests/tpch/conftest.py

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -7,7 +7,6 @@
77

88
import coiled
99
import dask
10-
import dask_expr
1110
import distributed
1211
import filelock
1312
import pytest
@@ -142,7 +141,6 @@ def tpch_database_table_schema(request, testrun_uid, scale, query, local):
142141
originalname=request.node.originalname,
143142
path=str(request.node.path.relative_to(TEST_DIR)),
144143
dask_version=dask.__version__,
145-
dask_expr_version=dask_expr.__version__,
146144
distributed_version=distributed.__version__,
147145
python_version=".".join(map(str, sys.version_info)),
148146
platform=sys.platform,

tests/tpch/dask_queries.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -949,7 +949,7 @@ def query_18(dataset_path, fs, scale):
949949
orders = dd.read_parquet(dataset_path + "orders", filesystem=fs)
950950
lineitem = dd.read_parquet(dataset_path + "lineitem", filesystem=fs)
951951

952-
# FIXME: https://github.com/dask-contrib/dask-expr/issues/867
952+
# FIXME: https://github.com/dask/dask-expr/issues/867
953953
qnt_over_300 = (
954954
lineitem.groupby("l_orderkey").l_quantity.sum(split_out=True).reset_index()
955955
)

0 commit comments

Comments (0)