Commit b41c932e authored by Kruyff, D.L.W. (Dylan)

Redo lost changes

parent 8d4ead19
@@ -460,6 +460,13 @@ def test_tokenize_dense_sparse_array(cls_name):
     assert tokenize(a) != tokenize(b)


+def test_tokenize_object_with_recursion_error_returns_uuid():
+    cycle = dict(a=None)
+    cycle["a"] = cycle
+    assert len(tokenize(cycle)) == 32
+
+
 def test_is_dask_collection():
     class DummyCollection(object):
         def __init__(self, dsk=None):
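A note on the new tokenize test above: an object that references itself makes any recursive normalization loop forever, so the tokenizer has to bail out and return a random token instead. The snippet below is a hedged sketch of that general fallback pattern (catch RecursionError, return a 32-character uuid hex string); normalize_nested and tokenize_with_fallback are hypothetical helpers, not the actual dask.base implementation.

import uuid

def normalize_nested(obj):
    # Hypothetical recursive normalizer: a self-referencing container
    # keeps recursing until Python raises RecursionError.
    if isinstance(obj, dict):
        return tuple((k, normalize_nested(v)) for k, v in obj.items())
    if isinstance(obj, (list, tuple)):
        return tuple(normalize_nested(v) for v in obj)
    return str(obj)

def tokenize_with_fallback(obj):
    # On RecursionError, fall back to a random UUID hex string, which is
    # exactly 32 characters long -- matching the assertion in the test above.
    try:
        return str(normalize_nested(obj))
    except RecursionError:
        return uuid.uuid4().hex

cycle = dict(a=None)
cycle["a"] = cycle
assert len(tokenize_with_fallback(cycle)) == 32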
@@ -285,7 +285,9 @@ def test_immutable_attributes():
     attrs_data_test = copy.deepcopy(attrs_data)
     to_graphviz(
-        dsk, function_attributes=attrs_func, data_attributes=attrs_data,
+        dsk,
+        function_attributes=attrs_func,
+        data_attributes=attrs_data,
     )
     assert attrs_func_test == attrs_func
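The to_graphviz hunk is purely a formatting change (the call is split across lines), but the surrounding test checks a useful invariant: a function that accepts attribute dicts should not mutate them. A minimal, hedged sketch of that check pattern, independent of to_graphviz itself (assert_does_not_mutate is a hypothetical helper):

import copy

def assert_does_not_mutate(func, *args, **kwargs):
    # Snapshot the keyword arguments, call the function, then verify the
    # caller-supplied objects are unchanged -- the same idea as
    # test_immutable_attributes with function_attributes/data_attributes.
    snapshot = copy.deepcopy(kwargs)
    func(*args, **kwargs)
    assert kwargs == snapshot, "function mutated its keyword arguments"

# A deliberately mutating function shows the check firing.
def bad(data=None):
    data["extra"] = 1

try:
    assert_does_not_mutate(bad, data={"color": "red"})
except AssertionError as exc:
    print(exc)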
@@ -45,8 +45,7 @@ def test_pickle_globals():
 @requires_cloudpickle
 def test_pickle_locals():
-    """Unrelated locals should not be included in serialized bytes
-    """
+    """Unrelated locals should not be included in serialized bytes"""

     def unrelated_function_local(a):
         return np.array([a])
@@ -78,8 +77,7 @@ def test_pickle_kwargs():
 @pytest.mark.skipif(pickle.HIGHEST_PROTOCOL < 5, reason="requires pickle protocol 5")
 def test_out_of_band_pickling():
-    """Test that out-of-band pickling works
-    """
+    """Test that out-of-band pickling works"""
     if has_cloudpickle:
         if cloudpickle.__version__ < LooseVersion("1.3.0"):
             pytest.skip("when using cloudpickle, it must be version 1.3.0+")
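For context on test_out_of_band_pickling: pickle protocol 5 (Python 3.8+) lets large buffers travel out of band instead of being copied into the pickle stream. A minimal sketch of that mechanism using only the standard library and NumPy; the test above exercises Dask's multiprocessing scheduler rather than this exact code.

import pickle
import numpy as np

data = np.arange(1_000_000, dtype="f8")

# Collect the out-of-band buffers instead of embedding the bytes in the pickle.
buffers = []
payload = pickle.dumps(data, protocol=5, buffer_callback=buffers.append)

# The payload holds only metadata; the large buffer is handed back at load
# time and can be transferred zero-copy where the transport allows it.
restored = pickle.loads(payload, buffers=buffers)
assert (restored == data).all()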
@@ -229,7 +227,7 @@ def check_for_pytest():
     sys.platform == "win32", reason="Windows doesn't support different contexts"
 )
 def test_custom_context_used_python3_posix():
-    """ The 'multiprocessing.context' config is used to create the pool.
+    """The 'multiprocessing.context' config is used to create the pool.

     We assume default is 'spawn', and therefore test for 'fork'.
     """
@@ -257,7 +255,7 @@ def test_custom_context_used_python3_posix():
     sys.platform == "win32", reason="Windows doesn't support different contexts"
 )
 def test_get_context_using_python3_posix():
-    """ get_context() respects configuration.
+    """get_context() respects configuration.

     If default context is changed this test will need to change too.
     """
@@ -270,7 +268,7 @@ def test_get_context_using_python3_posix():
 @pytest.mark.skipif(sys.platform != "win32", reason="POSIX supports different contexts")
 def test_custom_context_ignored_elsewhere():
-    """ On Windows, setting 'multiprocessing.context' doesn't explode.
+    """On Windows, setting 'multiprocessing.context' doesn't explode.

     Presumption is it's not used since it's unsupported, but mostly we care about
     not breaking anything.
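The three docstring tweaks above belong to tests of the 'multiprocessing.context' configuration key. A hedged usage sketch of what those tests describe: picking the process start method for Dask's multiprocessing scheduler through dask.config. This only makes sense on POSIX ('fork' is unavailable on Windows, which is what test_custom_context_ignored_elsewhere covers), and the graph/result shown here are illustrative.

from operator import add

import dask
from dask.multiprocessing import get

# A trivial graph with a picklable, module-level callable.
dsk = {"x": 1, "y": (add, "x", 1)}

# Ask the multiprocessing scheduler to build its pool with the 'fork'
# start method; the tests assume the default is 'spawn'.
with dask.config.set({"multiprocessing.context": "fork"}):
    result = get(dsk, "y")

assert result == 2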
@@ -546,7 +546,7 @@ def test_map_overlap(abcde):
 def test_use_structure_not_keys(abcde):
-    """ See https://github.com/dask/dask/issues/5584#issuecomment-554963958
+    """See https://github.com/dask/dask/issues/5584#issuecomment-554963958

     We were using key names to infer structure, which could result in funny behavior.
     """
@@ -601,7 +601,7 @@ def test_dont_run_all_dependents_too_early(abcde):
 def test_many_branches_use_ndependencies(abcde):
-    """ From https://github.com/dask/dask/pull/5646#issuecomment-562700533
+    """From https://github.com/dask/dask/pull/5646#issuecomment-562700533

     Sometimes we need larger or wider DAGs to test behavior. This test
     ensures we choose the branch with more work twice in succession.
@@ -714,7 +714,7 @@ def test_switching_dependents(abcde):
 def test_order_with_equal_dependents(abcde):
-    """ From https://github.com/dask/dask/issues/5859#issuecomment-608422198
+    """From https://github.com/dask/dask/issues/5859#issuecomment-608422198

     See the visualization of `(maxima, argmax)` example from the above comment.
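The three hunks above only trim a leading space in docstrings of the ordering tests. For readers unfamiliar with what those tests exercise: dask.order assigns every key in a graph an integer priority that schedulers use to decide what to run first. A small, hedged illustration; the exact numbers are an implementation detail and may differ between versions, so only ordering invariants are asserted.

from operator import add

from dask.order import order

dsk = {
    "a": 1,
    "b": 2,
    "c": (add, "a", "b"),
    "d": (add, "c", "a"),
}

priorities = order(dsk)
# order() returns a dict mapping every key to an int; smaller numbers are
# intended to run earlier, and dependencies come before their dependents.
assert set(priorities) == set(dsk)
assert priorities["a"] < priorities["c"] < priorities["d"]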
 import sys
+from array import array

 import pytest
@@ -18,6 +19,13 @@ def test_containers():
     assert sizeof([1, 2, [3]]) > (getsizeof(3) * 3 + getsizeof([]))


+def test_bytes_like():
+    assert 1000 <= sizeof(bytes(1000)) <= 2000
+    assert 1000 <= sizeof(bytearray(1000)) <= 2000
+    assert 1000 <= sizeof(memoryview(bytes(1000))) <= 2000
+    assert 8000 <= sizeof(array("d", range(1000))) <= 9000
+
+
 def test_numpy():
     np = pytest.importorskip("numpy")
     assert 8000 <= sizeof(np.empty(1000, dtype="f8")) <= 9000
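The bounds in test_bytes_like are loose on purpose: sizeof should report roughly the payload size plus object overhead, not an exact figure. A hedged sketch of why those numbers hold, using only the standard library; dask.sizeof itself dispatches on type, and this is not its implementation.

import sys
from array import array

# bytes/bytearray: ~1000 bytes of payload plus a small object header on
# CPython, so a sane estimate lands between 1000 and 2000.
print(sys.getsizeof(bytes(1000)))
print(sys.getsizeof(bytearray(1000)))

# memoryview: getsizeof only counts the view object, so a sizeof
# implementation has to look at the underlying buffer instead.
mv = memoryview(bytes(1000))
print(mv.nbytes)  # 1000, the size of the exposed buffer

# array('d', ...): 1000 doubles = 8000 bytes of payload plus overhead,
# hence the 8000..9000 bound in the test.
a = array("d", range(1000))
print(a.itemsize * len(a))  # 8000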
@@ -28,6 +28,7 @@ from dask.utils import (
     parse_timedelta,
     parse_bytes,
     is_arraylike,
+    iter_chunks,
 )
 from dask.utils_test import inc
 from dask.highlevelgraph import HighLevelGraph
@@ -454,7 +455,7 @@ def test_has_keyword():
 def test_derived_from():
     class Foo:
         def f(a, b):
-            """ A super docstring
+            """A super docstring

             An explanation
@@ -566,3 +567,18 @@ def test_is_arraylike():
     assert is_arraylike(np.empty(())) is True
     assert is_arraylike(np.empty((0,))) is True
     assert is_arraylike(np.empty((0, 0))) is True
+
+
+def test_iter_chunks():
+    sizes = [14, 8, 5, 9, 7, 9, 1, 19, 8, 19]
+    assert list(iter_chunks(sizes, 19)) == [
+        [14],
+        [8, 5],
+        [9, 7],
+        [9, 1],
+        [19],
+        [8],
+        [19],
+    ]
+    assert list(iter_chunks(sizes, 28)) == [[14, 8, 5], [9, 7, 9, 1], [19, 8], [19]]
+    assert list(iter_chunks(sizes, 67)) == [[14, 8, 5, 9, 7, 9, 1], [19, 8, 19]]
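The expected outputs in test_iter_chunks are consistent with a simple greedy grouping: accumulate sizes left to right and start a new chunk whenever adding the next element would exceed the limit, with an element larger than the limit getting a chunk of its own. A hedged reimplementation of that behavior which reproduces the assertions above; it is not necessarily how dask.utils.iter_chunks is actually written.

def greedy_chunks(sizes, max_size):
    # Yield consecutive runs of `sizes` whose sums stay within `max_size`.
    chunk, total = [], 0
    for size in sizes:
        if chunk and total + size > max_size:
            yield chunk
            chunk, total = [], 0
        chunk.append(size)
        total += size
    if chunk:
        yield chunk

sizes = [14, 8, 5, 9, 7, 9, 1, 19, 8, 19]
assert list(greedy_chunks(sizes, 19)) == [[14], [8, 5], [9, 7], [9, 1], [19], [8], [19]]
assert list(greedy_chunks(sizes, 28)) == [[14, 8, 5], [9, 7, 9, 1], [19, 8], [19]]
assert list(greedy_chunks(sizes, 67)) == [[14, 8, 5, 9, 7, 9, 1], [19, 8, 19]]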