Commit b41c932e authored by Kruyff,D.L.W. (Dylan)

Redo lost changes

parent 8d4ead19
@@ -23,8 +23,8 @@ def register_cupyx():
     from cupyx.scipy.sparse import spmatrix
     try:
-        from cupy.sparse import hstack
-        from cupy.sparse import vstack
+        from cupyx.scipy.sparse import hstack
+        from cupyx.scipy.sparse import vstack
     except ImportError as e:
         raise ImportError(
             "Stacking of sparse arrays requires at least CuPy version 8.0.0"
...
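The hunk above is the one functional change in this commit: the sparse ``hstack``/``vstack`` helpers are now imported from ``cupyx.scipy.sparse`` instead of ``cupy.sparse``, matching the CuPy >= 8.0.0 requirement named in the error message; the remaining hunks are a mechanical docstring cleanup that removes the space after the opening triple quotes. A minimal usage sketch of the corrected import path (illustrative only, not part of the diff; assumes a CUDA device with CuPy >= 8.0.0 installed):

    import cupy as cp
    from cupyx.scipy.sparse import csr_matrix, hstack, vstack

    # Two small sparse matrices living on the GPU.
    a = csr_matrix(cp.eye(2))
    b = csr_matrix(cp.ones((2, 2)))

    print(hstack([a, b]).shape)  # (2, 4): blocks joined column-wise
    print(vstack([a, b]).shape)  # (4, 2): blocks joined row-wise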
@@ -23,7 +23,7 @@ def blockwise(
     meta=None,
     **kwargs
 ):
-    """ Tensor operation: Generalized inner and outer products
+    """Tensor operation: Generalized inner and outer products
     A broad class of blocked algorithms and patterns can be specified with a
     concise multi-index notation. The ``blockwise`` function applies an in-memory
...
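The ``blockwise`` docstring touched above describes dask's multi-index notation for blocked tensor operations. A hedged sketch of that notation in use (illustrative, not taken from the diff):

    import numpy as np
    import dask.array as da

    x = da.ones((4, 4), chunks=(2, 2))
    y = da.full((4, 4), 2, chunks=(2, 2))

    # Matching index strings "ij"/"ij" -> element-wise application per block.
    z = da.blockwise(np.add, "ij", x, "ij", y, "ij", dtype=x.dtype)
    print(z.compute()[0, 0])  # 3.0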
@@ -87,7 +87,7 @@ with ignoring(AttributeError):
 def coarsen(reduction, x, axes, trim_excess=False, **kwargs):
-    """ Coarsen array by applying reduction to fixed size neighborhoods
+    """Coarsen array by applying reduction to fixed size neighborhoods
     Parameters
     ----------
@@ -144,7 +144,7 @@ def coarsen(reduction, x, axes, trim_excess=False, **kwargs):
 def trim(x, axes=None):
-    """ Trim boundaries off of array
+    """Trim boundaries off of array
     >>> x = np.arange(24).reshape((4, 6))
     >>> trim(x, axes={0: 0, 1: 1})
@@ -166,7 +166,7 @@ def trim(x, axes=None):
 def topk(a, k, axis, keepdims):
-    """ Chunk and combine function of topk
+    """Chunk and combine function of topk
     Extract the k largest elements from a on the given axis.
     If k is negative, extract the -k smallest elements instead.
@@ -184,7 +184,7 @@ def topk(a, k, axis, keepdims):
 def topk_aggregate(a, k, axis, keepdims):
-    """ Final aggregation function of topk
+    """Final aggregation function of topk
     Invoke topk one final time and then sort the results internally.
     """
@@ -202,7 +202,7 @@ def topk_aggregate(a, k, axis, keepdims):
 def argtopk_preprocess(a, idx):
-    """ Preparatory step for argtopk
+    """Preparatory step for argtopk
     Put data together with its original indices in a tuple.
     """
@@ -210,7 +210,7 @@ def argtopk_preprocess(a, idx):
 def argtopk(a_plus_idx, k, axis, keepdims):
-    """ Chunk and combine function of argtopk
+    """Chunk and combine function of argtopk
     Extract the indices of the k largest elements from a on the given axis.
     If k is negative, extract the indices of the -k smallest elements instead.
@@ -239,7 +239,7 @@ def argtopk(a_plus_idx, k, axis, keepdims):
 def argtopk_aggregate(a_plus_idx, k, axis, keepdims):
-    """ Final aggregation function of argtopk
+    """Final aggregation function of argtopk
     Invoke argtopk one final time, sort the results internally, drop the data
     and return the index only.
@@ -278,7 +278,7 @@ def view(x, dtype, order="C"):
 def slice_with_int_dask_array(x, idx, offset, x_size, axis):
-    """ Chunk function of `slice_with_int_dask_array_on_axis`.
+    """Chunk function of `slice_with_int_dask_array_on_axis`.
     Slice one chunk of x by one chunk of idx.
     Parameters
@@ -320,7 +320,7 @@ def slice_with_int_dask_array(x, idx, offset, x_size, axis):
 def slice_with_int_dask_array_aggregate(idx, chunk_outputs, x_chunks, axis):
-    """ Final aggregation function of `slice_with_int_dask_array_on_axis`.
+    """Final aggregation function of `slice_with_int_dask_array_on_axis`.
     Aggregate all chunks of x by one chunk of idx, reordering the output of
     `slice_with_int_dask_array`.
...
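The ``topk``/``argtopk`` helpers above are the per-chunk and aggregation pieces behind dask's top-k reduction: each chunk keeps its k best candidates, and the aggregate step re-applies the selection and sorts. A rough sketch of the user-facing behaviour they implement (illustrative, assumes a recent dask release):

    import numpy as np
    import dask.array as da

    x = da.from_array(np.array([5, 1, 3, 6]), chunks=2)

    print(x.topk(2).compute())   # [6 5]: the two largest, sorted descending
    print(x.topk(-2).compute())  # [1 3]: negative k -> the two smallest, ascending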
@@ -105,7 +105,7 @@ def getter(a, b, asarray=True, lock=None):
 def getter_nofancy(a, b, asarray=True, lock=None):
-    """ A simple wrapper around ``getter``.
+    """A simple wrapper around ``getter``.
     Used to indicate to the optimization passes that the backend doesn't
     support fancy indexing.
@@ -114,7 +114,7 @@ def getter_nofancy(a, b, asarray=True, lock=None):
 def getter_inline(a, b, asarray=True, lock=None):
-    """ A getter function that optimizations feel comfortable inlining
+    """A getter function that optimizations feel comfortable inlining
     Slicing operations with this function may be inlined into a graph, such as
     in the following rewrite
@@ -166,7 +166,7 @@ def implements(*numpy_functions):
 def slices_from_chunks(chunks):
-    """ Translate chunks tuple to a set of slices in product order
+    """Translate chunks tuple to a set of slices in product order
     >>> slices_from_chunks(((2, 2), (3, 3, 3)))  # doctest: +NORMALIZE_WHITESPACE
     [(slice(0, 2, None), slice(0, 3, None)),
@@ -194,7 +194,7 @@ def getem(
     asarray=True,
     dtype=None,
 ):
-    """ Dask getting various chunks from an array-like
+    """Dask getting various chunks from an array-like
     >>> getem('X', chunks=(2, 3), shape=(4, 6))  # doctest: +SKIP
     {('X', 0, 0): (getter, 'X', (slice(0, 2), slice(0, 3))),
@@ -227,7 +227,7 @@ def getem(
 def dotmany(A, B, leftfunc=None, rightfunc=None, **kwargs):
-    """ Dot product of many aligned chunks
+    """Dot product of many aligned chunks
     >>> x = np.array([[1, 2], [1, 2]])
     >>> y = np.array([[10, 20], [10, 20]])
@@ -249,7 +249,7 @@ def dotmany(A, B, leftfunc=None, rightfunc=None, **kwargs):
 def _concatenate2(arrays, axes=[]):
-    """ Recursively Concatenate nested lists of arrays along axes
+    """Recursively Concatenate nested lists of arrays along axes
     Each entry in axes corresponds to each level of the nested list. The
     length of axes should correspond to the level of nesting of arrays.
@@ -369,7 +369,7 @@ def apply_infer_dtype(func, args, kwargs, funcname, suggest_dtype="dtype", nout=
 def normalize_arg(x):
-    """ Normalize user provided arguments to blockwise or map_blocks
+    """Normalize user provided arguments to blockwise or map_blocks
     We do a few things:
@@ -391,7 +391,7 @@ def normalize_arg(x):
 def _pass_extra_kwargs(func, keys, *args, **kwargs):
-    """ Helper for :func:`map_blocks` to pass `block_info` or `block_id`.
+    """Helper for :func:`map_blocks` to pass `block_info` or `block_id`.
     For each element of `keys`, a corresponding element of args is changed
     to a keyword argument with that key, before all arguments re passed on
@@ -413,7 +413,7 @@ def map_blocks(
     meta=None,
     **kwargs,
 ):
-    """ Map a function across all blocks of a dask array.
+    """Map a function across all blocks of a dask array.
     Parameters
     ----------
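``map_blocks`` applies a function to every block of a dask array and reassembles the results. A quick illustrative example of the API touched above (not taken from the diff):

    import dask.array as da

    x = da.arange(8, chunks=4)
    y = x.map_blocks(lambda block: block * 2, dtype=x.dtype)
    print(y.compute())  # [ 0  2  4  6  8 10 12 14]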
@@ -760,7 +760,7 @@ def map_blocks(
 def broadcast_chunks(*chunkss):
-    """ Construct a chunks tuple that broadcasts many chunks tuples
+    """Construct a chunks tuple that broadcasts many chunks tuples
     >>> a = ((5, 5),)
     >>> b = ((5, 5),)
@@ -812,7 +812,7 @@ def store(
     return_stored=False,
     **kwargs,
 ):
-    """ Store dask arrays in array-like objects, overwrite data in target
+    """Store dask arrays in array-like objects, overwrite data in target
     This stores dask arrays into object that supports numpy-style setitem
     indexing. It stores values chunk by chunk so that it does not have to
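As the ``store`` docstring says, values are written chunk by chunk into any target that supports numpy-style setitem indexing. A small sketch with a plain NumPy array as the target (illustrative only):

    import numpy as np
    import dask.array as da

    x = da.ones((4, 4), chunks=(2, 2))
    target = np.zeros((4, 4))   # any object with numpy-style __setitem__
    da.store(x, target)         # writes each 2x2 chunk into `target`
    print(target.sum())         # 16.0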
@@ -999,7 +999,7 @@ See the following documentation page for details:
 class Array(DaskMethodsMixin):
-    """ Parallel Dask Array
+    """Parallel Dask Array
     A parallel nd-array comprised of many numpy arrays arranged in a grid.
@@ -1398,7 +1398,7 @@ class Array(DaskMethodsMixin):
         return r
     def to_svg(self, size=500):
-        """ Convert chunks from Dask Array into an SVG Image
+        """Convert chunks from Dask Array into an SVG Image
         Parameters
         ----------
@@ -1419,7 +1419,7 @@ class Array(DaskMethodsMixin):
         return svg(self.chunks, size=size)
     def to_hdf5(self, filename, datapath, **kwargs):
-        """ Store array in HDF5 file
+        """Store array in HDF5 file
         >>> x.to_hdf5('myfile.hdf5', '/x')  # doctest: +SKIP
@@ -1435,7 +1435,7 @@ class Array(DaskMethodsMixin):
         return to_hdf5(filename, datapath, self, **kwargs)
     def to_dask_dataframe(self, columns=None, index=None, meta=None):
-        """ Convert dask Array to dask Dataframe
+        """Convert dask Array to dask Dataframe
         Parameters
         ----------
@@ -1636,7 +1636,7 @@ class Array(DaskMethodsMixin):
     @property
     def blocks(self):
-        """ Slice an array by blocks
+        """Slice an array by blocks
         This allows blockwise slicing of a Dask array. You can perform normal
         Numpy-style slicing but now rather than slice elements of the array you
@@ -2118,7 +2118,7 @@ class Array(DaskMethodsMixin):
         return map_blocks(func, self, *args, **kwargs)
     def map_overlap(self, func, depth, boundary=None, trim=True, **kwargs):
-        """ Map a function over blocks of the array with some overlap
+        """Map a function over blocks of the array with some overlap
         We share neighboring zones between blocks of the array, then map a
         function, then trim away the neighboring strips.
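``map_overlap`` shares a strip of ``depth`` elements with each neighbouring block, maps the function, and then trims the strips off again, so per-block functions can look across chunk boundaries. A brief sketch of that behaviour (illustrative, modelled on dask's documented usage):

    import numpy as np
    import dask.array as da

    x = da.from_array(np.array([1, 1, 2, 3, 3, 3, 2, 1, 1]), chunks=5)

    def derivative(block):
        return block - np.roll(block, 1)

    # depth=1 shares one element with each neighbour; boundary=0 pads the edges.
    y = x.map_overlap(derivative, depth=1, boundary=0)
    print(y.compute())  # [ 1  0  1  1  0  0 -1 -1  0]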
@@ -2224,7 +2224,7 @@ class Array(DaskMethodsMixin):
         return clip(self, min, max)
     def view(self, dtype=None, order="C"):
-        """ Get a view of the array as a new data type
+        """Get a view of the array as a new data type
         Parameters
         ----------
@@ -2355,7 +2355,7 @@ def ensure_int(f):
 def normalize_chunks(chunks, shape=None, limit=None, dtype=None, previous_chunks=None):
-    """ Normalize chunks to tuple of tuples
+    """Normalize chunks to tuple of tuples
     This takes in a variety of input types and information and produces a full
     tuple-of-tuples result for chunks, suitable to be passed to Array or
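``normalize_chunks`` is the funnel through which every user-supplied ``chunks=`` argument passes, expanding shorthand forms into a full tuple of tuples. Two illustrative calls (not from the diff):

    from dask.array.core import normalize_chunks

    print(normalize_chunks(3, shape=(7,)))         # ((3, 3, 1),)
    print(normalize_chunks((2, 3), shape=(4, 6)))  # ((2, 2), (3, 3, 3))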
@@ -2529,7 +2529,7 @@ def _compute_multiplier(limit: int, dtype, largest_block: int, result):
 def auto_chunks(chunks, shape, limit, dtype, previous_chunks=None):
-    """ Determine automatic chunks
+    """Determine automatic chunks
     This takes in a chunks value that contains ``"auto"`` values in certain
     dimensions and replaces those values with concrete dimension sizes that try
@@ -2654,7 +2654,7 @@ def auto_chunks(chunks, shape, limit, dtype, previous_chunks=None):
 def round_to(c, s):
-    """ Return a chunk dimension that is close to an even multiple or factor
+    """Return a chunk dimension that is close to an even multiple or factor
     We want values for c that are nicely aligned with s.
@@ -2690,7 +2690,7 @@ def from_array(
     getitem=None,
     meta=None,
 ):
-    """ Create dask array from something that looks like an array
+    """Create dask array from something that looks like an array
     Input must have a ``.shape``, ``.ndim``, ``.dtype`` and support numpy-style slicing.
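``from_array`` wraps anything with a ``.shape``, ``.ndim``, ``.dtype`` and numpy-style slicing into a chunked dask array. A minimal illustration (not part of the diff):

    import numpy as np
    import dask.array as da

    x = np.arange(10)
    d = da.from_array(x, chunks=5)   # two blocks of five elements
    print(d.chunks)                  # ((5, 5),)
    print(d.sum().compute())         # 45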
@@ -3029,7 +3029,7 @@ def _check_regular_chunks(chunkset):
 def from_delayed(value, shape, dtype=None, meta=None, name=None):
-    """ Create a dask array from a dask delayed value
+    """Create a dask array from a dask delayed value
     This routine is useful for constructing dask arrays in an ad-hoc fashion
     using dask delayed, particularly when combined with stack and concatenate.
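``from_delayed`` turns a single delayed value of known shape and dtype into a one-block dask array, which can then be stacked or concatenated with others. A short sketch (illustrative only):

    import numpy as np
    import dask
    import dask.array as da

    value = dask.delayed(np.ones)((4,))     # delayed call producing a NumPy array
    x = da.from_delayed(value, shape=(4,), dtype=float)
    print(x.compute())                      # [1. 1. 1. 1.]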
@@ -3062,7 +3062,7 @@ def from_delayed(value, shape, dtype=None, meta=None, name=None):
 def from_func(func, shape, dtype=None, name=None, args=(), kwargs={}):
-    """ Create dask array in a single block by calling a function
+    """Create dask array in a single block by calling a function
     Calling the provided function with func(*args, **kwargs) should return a
     NumPy array of the indicated shape and dtype.
@@ -3090,7 +3090,7 @@ def from_func(func, shape, dtype=None, name=None, args=(), kwargs={}):
 def common_blockdim(blockdims):
-    """ Find the common block dimensions from the list of block dimensions
+    """Find the common block dimensions from the list of block dimensions
     Currently only implements the simplest possible heuristic: the common
     block-dimension is the only one that does not span fully span a dimension.
@@ -3877,7 +3877,7 @@ def broadcast_shapes(*shapes):
 def elemwise(op, *args, **kwargs):
-    """ Apply elementwise function across arguments
+    """Apply elementwise function across arguments
     Respects broadcasting rules
@@ -3960,7 +3960,7 @@ def elemwise(op, *args, **kwargs):
 def handle_out(out, result):
-    """ Handle out parameters
+    """Handle out parameters
     If out is a dask.array then this overwrites the contents of that array with
     the result
@@ -4127,7 +4127,7 @@ def broadcast_arrays(*args, **kwargs):
 def offset_func(func, offset, *args):
-    """ Offsets inputs by offset
+    """Offsets inputs by offset
     >>> double = lambda x: x * 2
     >>> f = offset_func(double, (10,))
@@ -4148,7 +4148,7 @@ def offset_func(func, offset, *args):
 def chunks_from_arrays(arrays):
-    """ Chunks tuple from nested list of arrays
+    """Chunks tuple from nested list of arrays
     >>> x = np.array([1, 2])
     >>> chunks_from_arrays([x, x])
@@ -4184,7 +4184,7 @@ def chunks_from_arrays(arrays):
 def deepfirst(seq):
-    """ First element in a nested list
+    """First element in a nested list
     >>> deepfirst([[[1, 2], [3, 4]], [5, 6], [7, 8]])
     1
@@ -4204,7 +4204,7 @@ def shapelist(a):
 def reshapelist(shape, seq):
-    """ Reshape iterator to nested shape
+    """Reshape iterator to nested shape
     >>> reshapelist((2, 3), range(6))
     [[0, 1, 2], [3, 4, 5]]
@@ -4217,7 +4217,7 @@ def reshapelist(shape, seq):
 def transposelist(arrays, axes, extradims=0):
-    """ Permute axes of nested list
+    """Permute axes of nested list
     >>> transposelist([[1,1,1],[1,1,1]], [2,1])
     [[[1, 1], [1, 1], [1, 1]]]
@@ -4357,7 +4357,7 @@ def stack(seq, axis=0, allow_unknown_chunksizes=False):
 def concatenate3(arrays):
-    """ Recursive np.concatenate
+    """Recursive np.concatenate
     Input should be a nested list of numpy arrays arranged in the order they
     should appear in the array itself. Each array should have the same number
@@ -4436,7 +4436,7 @@ def concatenate_axes(arrays, axes):
...