
Commit

lint
gjoseph92 committed Aug 12, 2022
1 parent c8dddd3 commit 63b4916
Showing 6 changed files with 23 additions and 17 deletions.
1 change: 1 addition & 0 deletions setup.cfg
@@ -8,6 +8,7 @@ ignore =
 
 [isort]
 skip = alembic
+profile = black
 
 [tool:pytest]
 addopts = -v -rsxfE --durations=0 --color=yes --strict-markers --strict-config
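
For context, profile = black aligns isort's line length and wrapping style with black; the reordering visible in the diffs below follows isort's usual rules: within each section, plain imports come first and from-imports follow, both alphabetized, with local imports in their own trailing section. A small illustrative sketch (module names are only examples, not taken from this commit):

import dask
import numpy as np
from dask.sizeof import sizeof
from dask.utils import format_bytes, parse_bytes

from .utils_test import scaled_array_shape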
6 changes: 3 additions & 3 deletions tests/test_utils_test.py
@@ -1,8 +1,8 @@
-import pytest
-import numpy as np
 import dask
-from dask.utils import parse_bytes
+import numpy as np
+import pytest
 from dask.sizeof import sizeof
+from dask.utils import parse_bytes
 
 from .utils_test import scaled_array_shape, timeseries_of_size
 
8 changes: 4 additions & 4 deletions tests/utils_test.py
@@ -1,12 +1,12 @@
-import numpy as np
-import pandas as pd
 import dask
 import dask.array as da
 import dask.dataframe as dd
-from dask.utils import parse_bytes, format_bytes
+import distributed
+import numpy as np
+import pandas as pd
 from dask.datasets import timeseries
 from dask.sizeof import sizeof
-import distributed
+from dask.utils import format_bytes, parse_bytes
 
 
 def scaled_array_shape(
15 changes: 9 additions & 6 deletions tests/workloads/test_array.py
@@ -1,11 +1,11 @@
 from __future__ import annotations
 
-import xarray as xr
 import dask.array as da
-from dask.utils import format_bytes
 import numpy as np
+import xarray as xr
+from dask.utils import format_bytes
 
-from ..utils_test import scaled_array_shape, wait, cluster_memory, arr_to_devnull
+from ..utils_test import arr_to_devnull, cluster_memory, scaled_array_shape, wait
 
 
 def print_size_info(memory: int, target_nbytes: int, *arrs: da.Array) -> None:
@@ -101,10 +101,13 @@ def test_vorticity(small_client):
 
 def pad_rechunk(arr):
     """
-    Pad a single element onto the end of arr, then merge the 1-element long chunk created back in.
+    Pad a single element onto the end of arr, then merge the 1-element long chunk
+    created back in.
 
-    This operation complicates each chain of the graph enough so that the scheduler no longer recognises the overall computation as blockwise,
-    but doesn't actually change the overall topology of the graph, or the number of chunks along any dimension of the array.
+    This operation complicates each chain of the graph enough so that the scheduler
+    no longer recognizes the overall computation as blockwise, but doesn't actually
+    change the overall topology of the graph, or the number of chunks along any
+    dimension of the array.
 
     This is motivated by the padding operation we do in xGCM, see
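
A rough sketch of the pad-then-rechunk pattern the docstring describes, assuming a simple 2-d array (the body of pad_rechunk is not shown in this diff, so the padding mode, axis, and chunk sizes below are illustrative only):

import dask.array as da

arr = da.zeros((8, 8), chunks=(4, 4))

# Padding one element onto the last axis creates a trailing 1-element chunk.
padded = da.pad(arr, [(0, 0), (0, 1)])

# Rechunking merges that 1-element chunk back into its neighbour, so the number
# of chunks per dimension is unchanged, but the graph is no longer purely blockwise.
rechunked = padded.rechunk({1: (4, 5)})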
6 changes: 4 additions & 2 deletions tests/workloads/test_custom.py
@@ -1,7 +1,8 @@
+import random
+import time
+
 from dask import delayed
 from dask.utils import parse_bytes
-import time
-import random
 
 from ..utils_test import wait

@@ -19,6 +20,7 @@ def test_jobqueue(small_client):
     def task(i: int) -> int:
         stuff = "x" * parse_bytes("400MiB")
         time.sleep(random.uniform(0, max_sleep))
+        del stuff
         return i
 
     tasks = [task(i) for i in range(n_tasks)]
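
The new del stuff line most likely satisfies flake8's unused-variable check (F841) without changing the test's behaviour: the ~400 MiB string is still held for the duration of the sleep. A self-contained sketch of how such tasks are typically driven with dask.delayed (the decorator, the constants, and the compute call are assumptions, not part of this diff):

import random
import time

import dask
from dask import delayed
from dask.utils import parse_bytes

n_tasks = 4      # illustrative values; the real test sizes these for its cluster
max_sleep = 1.0

@delayed
def task(i: int) -> int:
    stuff = "x" * parse_bytes("400MiB")       # hold ~400 MiB while the task sleeps
    time.sleep(random.uniform(0, max_sleep))
    del stuff                                 # drop the reference; also avoids the F841 warning
    return i

results = dask.compute(*[task(i) for i in range(n_tasks)])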
4 changes: 2 additions & 2 deletions tests/workloads/test_dataframe.py
@@ -1,7 +1,7 @@
-from dask.utils import format_bytes
 from dask.sizeof import sizeof
+from dask.utils import format_bytes
 
-from ..utils_test import timeseries_of_size, cluster_memory, wait
+from ..utils_test import cluster_memory, timeseries_of_size, wait
 
 
 def print_dataframe_info(df):
