From 9672d9f964f14b5acdec5c0ebff6c144e33069fc Mon Sep 17 00:00:00 2001
From: alex rakowski
Date: Mon, 25 Jul 2022 18:52:11 +0000
Subject: [PATCH 01/10] adding back in the stack pointer attr for h5 files

---
 py4DSTEM/io/datastructure/py4dstem/datacube.py | 9 ++++++++-
 1 file changed, 8 insertions(+), 1 deletion(-)

diff --git a/py4DSTEM/io/datastructure/py4dstem/datacube.py b/py4DSTEM/io/datastructure/py4dstem/datacube.py
index 7f18f580d..7c698a64e 100644
--- a/py4DSTEM/io/datastructure/py4dstem/datacube.py
+++ b/py4DSTEM/io/datastructure/py4dstem/datacube.py
@@ -36,7 +36,8 @@ def __init__(
         Q_pixel_size: Optional[Union[float,list]] = 1,
         Q_pixel_units: Optional[Union[str,list]] = 'pixels',
         slicelabels: Optional[Union[bool,list]] = None,
-        calibration: Optional = None,
+        calibration: Optional[Calibration] = None,
+        stack_pointer = None,
         ):
         """
         Accepts:
@@ -99,6 +100,12 @@ def __init__(
             self.tree['calibration'].set_Q_pixel_size( Q_pixel_size )
             self.tree['calibration'].set_Q_pixel_units( Q_pixel_units )
 
+        # Add attribute of stack pointer for Dask related stuff
+        # Tacking this on here for now
+        # this can also be used as a quick check for whether the data is file-backed
+        self.stack_pointer = stack_pointer
+
+
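For orientation, here is a minimal sketch of what the new stack_pointer argument is for, assuming a DataCube built by hand from an h5py dataset wrapped in a dask array; the file path and dataset key are hypothetical:

    import h5py
    import dask.array as da
    from py4DSTEM.io.datastructure import DataCube

    # hypothetical file path and dataset key
    file = h5py.File('experiment.h5', 'r')
    dset = file['4DSTEM_experiment/data/datacubes/datacube_0/data']

    # chunk one diffraction pattern at a time, mirroring the reader patch below,
    # and keep a handle to the underlying h5py Dataset on the DataCube
    data = da.from_array(dset, chunks=(1, 1, dset.shape[2], dset.shape[3]))
    datacube = DataCube(data=data, name='datacube_0', stack_pointer=dset)

Holding onto the h5py Dataset lets downstream code tell a lazily file-backed DataCube from an in-memory one without inspecting the array type.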
From e1a80010c453a8d55dc500a0d2e6d35771a28a4b Mon Sep 17 00:00:00 2001
From: alex rakowski
Date: Mon, 25 Jul 2022 18:53:01 +0000
Subject: [PATCH 02/10] adding DASK load functionality for v_0_12

---
 py4DSTEM/io/native/legacy/read_v0_12.py | 10 +++++++++-
 1 file changed, 9 insertions(+), 1 deletion(-)

diff --git a/py4DSTEM/io/native/legacy/read_v0_12.py b/py4DSTEM/io/native/legacy/read_v0_12.py
index 0a3bb225a..8d12c83a9 100644
--- a/py4DSTEM/io/native/legacy/read_v0_12.py
+++ b/py4DSTEM/io/native/legacy/read_v0_12.py
@@ -1,5 +1,6 @@
 # Reader for py4DSTEM v0.12 files
 
+from inspect import stack
 import h5py
 import numpy as np
 from os.path import splitext, exists
@@ -11,6 +12,7 @@
 from ...datastructure import PointList
 from ...datastructure import PointListArray
 from .... import tqdmnd
+import dask.array as da
 
 def read_v0_12(fp, **kwargs):
     """
@@ -286,8 +288,14 @@ def get_datacube_from_grp(g,mem='RAM',binfactor=1,bindtype=None):
     elif (mem, binfactor) == ("MEMMAP", 1):
         data = g['data']
         stack_pointer = None
+    elif (mem, binfactor) == ("DASK", 1):
+        stack_pointer = g['data']
+        shape = g['data'].shape
+
+        data = da.from_array(stack_pointer, chunks=(1,1,shape[2], shape[3]))
+
     name = g.name.split('/')[-1]
-    return DataCube(data=data,name=name)
+    return DataCube(data=data,name=name, stack_pointer=stack_pointer)
 
 
 def get_diffractionslice_from_grp(g):

From 5a51dd1570435981d6cbbbb7bdb0e6d81fe05024 Mon Sep 17 00:00:00 2001
From: alex rakowski
Date: Mon, 25 Jul 2022 18:53:38 +0000
Subject: [PATCH 03/10] adding parallel disk detection back to the namespace

---
 py4DSTEM/process/diskdetection/__init__.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/py4DSTEM/process/diskdetection/__init__.py b/py4DSTEM/process/diskdetection/__init__.py
index 0c7635191..bc39348a2 100644
--- a/py4DSTEM/process/diskdetection/__init__.py
+++ b/py4DSTEM/process/diskdetection/__init__.py
@@ -1,4 +1,4 @@
 from .diskdetection import *
 from .braggvectormap import *
 #from .diskdetection_aiml import *
-#from .diskdetection_parallel_new import *
+from .diskdetection_parallel_new import *
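Assuming the top-level reader forwards mem and binfactor through to get_datacube_from_grp, lazily loading a legacy file might then look like this; the file path is a placeholder:

    import py4DSTEM

    # hypothetical v0.12 file; mem="DASK" selects the new branch above
    datacube = py4DSTEM.io.read('legacy_v0_12.h5', mem='DASK', binfactor=1)

    print(datacube.data)           # a lazy dask array, chunked per diffraction pattern
    print(datacube.stack_pointer)  # the underlying h5py Dataset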
From 2dd8998f94514211d10fb593223cef4dff44ac82 Mon Sep 17 00:00:00 2001
From: alex rakowski
Date: Mon, 25 Jul 2022 18:56:28 +0000
Subject: [PATCH 04/10] updating to new syntax and func names

---
 .../diskdetection_parallel_new.py | 28 +++++++++++++------
 1 file changed, 19 insertions(+), 9 deletions(-)

diff --git a/py4DSTEM/process/diskdetection/diskdetection_parallel_new.py b/py4DSTEM/process/diskdetection/diskdetection_parallel_new.py
index 6601751b3..c94de2601 100644
--- a/py4DSTEM/process/diskdetection/diskdetection_parallel_new.py
+++ b/py4DSTEM/process/diskdetection/diskdetection_parallel_new.py
@@ -8,7 +8,7 @@
 import dask
 #import dask.bag as db
 from py4DSTEM.io.datastructure import PointListArray, PointList
-from .diskdetection import _find_Bragg_disks_single_DP_FK
+from .diskdetection import _find_Bragg_disks_single
 from py4DSTEM.io import PointListArray, PointList, datastructure
 import time
 from dask.diagnostics import ProgressBar
@@ -50,7 +50,6 @@ def register_dill_serializer():
     register_serialization_family('dill', dill_dumps, dill_loads)
     return None
 
-
 #### END OF SERIALISERS ####
@@ -62,7 +61,7 @@
 # TODO add ML-AI version
 def _find_Bragg_disks_single_DP_FK_dask_wrapper(arr, *args,**kwargs):
     # This is needed as _find_Bragg_disks_single_DP_FK takes a 2D array, and these arrays have the wrong shape
-    return _find_Bragg_disks_single_DP_FK(arr[0,0], *args, **kwargs)
+    return _find_Bragg_disks_single(arr[0,0], *args, **kwargs)
 
 #### END OF DASK WRAPPER FUNCTIONS ####
@@ -75,7 +74,7 @@ def _find_Bragg_disks_single_DP_FK_dask_wrapper(arr, *args,**kwargs):
 
 def beta_parallel_disk_detection(dataset,
                                  probe,
-                                 #rxmin=None, # these would allow selecting a sub section
+                                 #rxmin=None, # these would allow selecting a sub section # probably not a useful case
                                  #rxmax=None,
                                  #rymin=None,
                                  #rymax=None,
@@ -125,13 +124,21 @@ def beta_parallel_disk_detection(dataset,
     # ... dask stuff.
     #TODO add assert statements and other checks. Think about reordering operations
 
+    ## adding assert statement to make sure peaks not passed as a keyword argument
+    assert 'peaks' not in kwargs, "peaks must not be passed as a keyword argument"
+
+    # Check to see if a dask client has been passed.
+    # if no client passed
     if dask_client == None:
+        # if parameters are passed, create a cluster and pass them to the dask client.
         if dask_client_params !=None:
 
             dask.config.set({'distributed.worker.memory.spill': False,
                 'distributed.worker.memory.target': False})
             cluster = LocalCluster(**dask_client_params)
             dask_client = Client(cluster, **dask_client_params)
+
+        # if no parameters are passed, create them with some default values
         else:
             # AUTO MAGICALLY SET?
             # LET DASK SET?
@@ -154,8 +161,10 @@ def beta_parallel_disk_detection(dataset,
             pass
 
-    # Probe stuff
+    #### Probe stuff
+    # check that the probe shape is correct.
     assert (probe.shape == dataset.data.shape[2:]), "Probe and Diffraction Pattern Shapes are Mismatched"
+
     if probe_type != "FT":
         #TODO clean up and pull out redundant parts
         #if probe.dtype != (np.complex128 or np.complex64 or np.complex256):
@@ -192,7 +201,7 @@ def beta_parallel_disk_detection(dataset,
     # loop over the dataset_delayed and create a delayed function of
     for x in np.ndindex(dataset_delayed.shape):
         temp = delayed(_find_Bragg_disks_single_DP_FK_dask_wrapper)(dataset_delayed[x],
-                            probe_kernel_FT=dask_probe_delayed[0,0],
+                            template=dask_probe_delayed[0,0],
                             #probe_kernel_FT=delayed_probe_kernel_FT,
                             *args, **kwargs) #passing through args from earlier or should I use
                             #corrPower=corrPower,
@@ -207,16 +216,17 @@ def beta_parallel_disk_detection(dataset,
     output = dask_client.gather(_temp_peaks) # gather the future objects
 
-    coords = [('qx',float),('qy',float),('intensity',float)]
-    peaks = PointListArray(coordinates=coords, shape=dataset.data.shape[:-2])
+    dtype = [('qx',float),('qy',float),('intensity',float)]
+    peaks = PointListArray(dtype=dtype, shape=dataset.data.shape[:-2])
     #temp_peaks[0][0]
 
     # operating over a list so we need the size (0->count) and re-create the probe positions (0->rx,0->ry),
+    # count is the size of the list
     for (count,(rx, ry)) in zip([i for i in range(dataset.data[...,0,0].size)],np.ndindex(dataset.data.shape[:-2])):
         #peaks.get_pointlist(rx, ry).add_pointlist(temp_peaks[0][count])
         #peaks.get_pointlist(rx, ry).add_pointlist(output[count][0])
-        peaks.get_pointlist(rx, ry).add_pointlist(output[count])
+        peaks.get_pointlist(rx, ry).add(output[count])
 
     # Clean up
     dask_client.cancel(_temp_peaks) # removes from the dask workers
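To make the intended usage concrete, a hedged sketch of calling the updated function follows; datacube and probe_kernel are assumed to exist already, the cluster settings are hypothetical, and the trailing kwargs stand in for ordinary disk detection parameters forwarded to the per-pattern function:

    from py4DSTEM.process.diskdetection import beta_parallel_disk_detection

    peaks = beta_parallel_disk_detection(
        datacube,
        probe_kernel,
        dask_client=None,                     # let the function build a LocalCluster
        dask_client_params={'n_workers': 4},  # hypothetical cluster settings
        close_dask_client=True,
        corrPower=1,                          # example detection kwargs
        minPeakSpacing=20,
    )

At this point in the series the function still returns a PointListArray; the next patch changes the return type.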
From 904d95e07ad39a315031afb7c1e4318407ffd911 Mon Sep 17 00:00:00 2001
From: alex rakowski
Date: Mon, 25 Jul 2022 22:02:56 +0000
Subject: [PATCH 05/10] converting pointlist array to BraggVectors type

---
 .../diskdetection_parallel_new.py | 22 +++++++++++++------
 1 file changed, 15 insertions(+), 7 deletions(-)

diff --git a/py4DSTEM/process/diskdetection/diskdetection_parallel_new.py b/py4DSTEM/process/diskdetection/diskdetection_parallel_new.py
index c94de2601..cff426026 100644
--- a/py4DSTEM/process/diskdetection/diskdetection_parallel_new.py
+++ b/py4DSTEM/process/diskdetection/diskdetection_parallel_new.py
@@ -8,6 +8,8 @@
 import dask
 #import dask.bag as db
 from py4DSTEM.io.datastructure import PointListArray, PointList
+from ...io.datastructure.py4dstem import DataCube, QPoints, BraggVectors
+
 from .diskdetection import _find_Bragg_disks_single
 from py4DSTEM.io import PointListArray, PointList, datastructure
 import time
 from dask.diagnostics import ProgressBar
@@ -219,8 +221,7 @@ def beta_parallel_disk_detection(dataset,
 
     dtype = [('qx',float),('qy',float),('intensity',float)]
     peaks = PointListArray(dtype=dtype, shape=dataset.data.shape[:-2])
-    #temp_peaks[0][0]
-
+
     # operating over a list so we need the size (0->count) and re-create the probe positions (0->rx,0->ry),
     # count is the size of the list
     for (count,(rx, ry)) in zip([i for i in range(dataset.data[...,0,0].size)],np.ndindex(dataset.data.shape[:-2])):
@@ -228,17 +229,24 @@ def beta_parallel_disk_detection(dataset,
         #peaks.get_pointlist(rx, ry).add_pointlist(output[count][0])
         peaks.get_pointlist(rx, ry).add(output[count])
 
-    # Clean up
+
+    # create a BraggVectors obj
+    braggvectors = BraggVectors(dataset.Rshape, dataset.Qshape)
+    # populate the uncalibrated object with the peaks
+    braggvectors._v_uncal = peaks
+
+
+    # Clean up dask related stuff
     dask_client.cancel(_temp_peaks) # removes from the dask workers
     del _temp_peaks # deletes the object
     if close_dask_client:
         dask_client.close()
-        return peaks
+        return braggvectors
     elif close_dask_client == False and return_dask_client == True:
-        return peaks, dask_client
+        return braggvectors, dask_client
     elif close_dask_client and return_dask_client == False:
-        return peaks
+        return braggvectors
     else:
         print('Dask Client in unknown state, this may result in unpredictable behaviour later')
-        return peaks
+        return braggvectors
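Downstream code sees the return-type change roughly like this; a short sketch, with the attribute names taken from the diff above:

    # beta_parallel_disk_detection now returns a BraggVectors object
    braggvectors = beta_parallel_disk_detection(datacube, probe_kernel)

    # the raw detected peaks live on the uncalibrated vectors
    pointlist = braggvectors._v_uncal.get_pointlist(0, 0)
    print(pointlist.data['qx'], pointlist.data['qy'], pointlist.data['intensity'])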
From a51bd053b244915b09b2b6b66155b958f9d04f43 Mon Sep 17 00:00:00 2001
From: alex rakowski
Date: Thu, 3 Nov 2022 21:21:58 +0000
Subject: [PATCH 06/10] more changes

---
 py4DSTEM/io/datastructure/py4dstem/datacube_fns.py |  3 ++
 py4DSTEM/process/diskdetection/diskdetection.py    | 38 ++++++++++++++-----
 .../diskdetection_parallel_new.py                  |  3 ++
 3 files changed, 34 insertions(+), 10 deletions(-)

diff --git a/py4DSTEM/io/datastructure/py4dstem/datacube_fns.py b/py4DSTEM/io/datastructure/py4dstem/datacube_fns.py
index bdc3f9ada..79702f872 100644
--- a/py4DSTEM/io/datastructure/py4dstem/datacube_fns.py
+++ b/py4DSTEM/io/datastructure/py4dstem/datacube_fns.py
@@ -394,6 +394,7 @@ def find_Bragg_disks(
     CUDA = False,
     CUDA_batched = True,
     distributed = None,
+    dask = None,
 
     _qt_progress_bar = None,
 
@@ -496,6 +497,7 @@ def find_Bragg_disks(
             processing
         if distributed is None, which is the default, processing will be in serial
+        dask (dict): if not None ... TODO
         _qt_progress_bar (QProgressBar instance): used only by the GUI for serial
             execution
         name (str): name for the output BraggVectors
@@ -541,6 +543,7 @@ def find_Bragg_disks(
         CUDA = CUDA,
         CUDA_batched = CUDA_batched,
         distributed = distributed,
+        dask = dask,
 
         _qt_progress_bar = _qt_progress_bar,
     )

diff --git a/py4DSTEM/process/diskdetection/diskdetection.py b/py4DSTEM/process/diskdetection/diskdetection.py
index af736ab1e..5ff0afe75 100644
--- a/py4DSTEM/process/diskdetection/diskdetection.py
+++ b/py4DSTEM/process/diskdetection/diskdetection.py
@@ -8,6 +8,7 @@
 from ..utils.get_maxima_2D import get_maxima_2D
 from ..utils.cross_correlate import get_cross_correlation_FT
 from ...utils.tqdmnd import tqdmnd
+# from .diskdetection_parallel_new import beta_parallel_disk_detection
 
@@ -34,9 +35,10 @@ def find_Bragg_disks(
     CUDA = False,
     CUDA_batched = True,
     distributed = None,
+    dask = None,
 
     _qt_progress_bar = None,
-    ):
+    **kws):
     """
     Finds the Bragg disks in the diffraction patterns represented by `data` by
     cross/phase correlation with `template`.
@@ -53,10 +55,10 @@ def find_Bragg_disks(
     and returns an instance or length N list of instances of QPoints
 
     For disk detection on a full DataCube, the calculation can be performed
-    on the CPU, GPU or a cluster. By default the CPU is used. If `CUDA` is set
-    to True, tries to use the GPU. If `CUDA_batched` is also set to True,
-    batches the FFT/IFFT computations on the GPU. For distribution to a cluster,
-    distributed must be set to a dictionary, with contents describing how
+    on the CPU, GPU, or using dask or ipyparallel. By default the CPU is used.
+    If `CUDA` is set to True, tries to use the GPU. If `CUDA_batched` is also set
+    to True, batches the FFT/IFFT computations on the GPU. For distribution to a
+    cluster, distributed must be set to a dictionary, with contents describing how
     distributed processing should be performed - see below for details.
@@ -141,6 +143,9 @@ def find_Bragg_disks(
             processing
         if distributed is None, which is the default, processing will be in serial
+        dask (dict): if not None, indicates dask should be used. Must then be a
+            dictionary with arguments to pass to the dask detection function.
+            Valid arguments are (...). See docstring for (...) for details.
         _qt_progress_bar (QProgressBar instance): used only by the GUI for serial
             execution
@@ -153,6 +158,8 @@ def find_Bragg_disks(
         - a (DataCube,rx,ry) 3-tuple, returns a list of QPoints instances
     """
+    # TODO add checks to ensure Dask and CUDA aren't both passed, i.e. ensure the user
+    # knows the behaviour
 
     # parse args
@@ -196,11 +203,13 @@ def find_Bragg_disks(
                 mode = 'dc_GPU'
             else:
                 mode = 'dc_GPU_batched'
+        elif dask is not None:
+            mode = 'dc_dask'
         else:
             x = _parse_distributed(distributed)
             connect, data_file, cluster_path, distributed_mode = x
             if distributed_mode == 'dask':
-                mode = 'dc_dask'
+                mode = 'dc_dask_old'
             elif distributed_mode == 'ipyparallel':
                 mode = 'dc_ipyparallel'
             else:
@@ -222,6 +231,9 @@ def find_Bragg_disks(
         kws['connect'] = connect
         kws['data_file'] = data_file
         kws['cluster_path'] = cluster_path
+    # dask kwargs
+    if dask is not None:
+        kws.update(dask)
 
     # run and return
     ans = fn(
@@ -243,7 +255,8 @@ def find_Bragg_disks(
 
     return ans
 
-
+# TODO add extra skeleton func which imports betaparallel and returns it if dask_cuda is added
+# TODO add ML-AI at some point
 def _get_function_dictionary():
 
     d = {
@@ -252,14 +265,19 @@ def _get_function_dictionary():
         "dc_CPU" : _find_Bragg_disks_CPU,
         "dc_GPU" : _find_Bragg_disks_CUDA_unbatched,
        "dc_GPU_batched" : _find_Bragg_disks_CUDA_batched,
-        "dc_dask" : _find_Bragg_disks_dask,
+        "dc_dask_old" : _find_Bragg_disks_dask,
+        # "dc_dask" : beta_parallel_disk_detection,
+        "dc_dask" : place_holder,
+
         "dc_ipyparallel" : _find_Bragg_disks_ipp,
     }
 
     return d
 
-
-
+# TODO change the name to something better
+def place_holder(*args, **kwargs):
+    # import lazily so dask stays an optional dependency, then forward the call
+    from .diskdetection_parallel_new import beta_parallel_disk_detection
+    return beta_parallel_disk_detection(*args, **kwargs)
 
 # Single diffraction pattern

diff --git a/py4DSTEM/process/diskdetection/diskdetection_parallel_new.py b/py4DSTEM/process/diskdetection/diskdetection_parallel_new.py
index cff426026..1dabbbf20 100644
--- a/py4DSTEM/process/diskdetection/diskdetection_parallel_new.py
+++ b/py4DSTEM/process/diskdetection/diskdetection_parallel_new.py
@@ -236,6 +236,9 @@ def beta_parallel_disk_detection(dataset,
     braggvectors._v_uncal = peaks
 
 
+    # TODO Remove ability to return the client
+    # TODO RE-VISIT IF NEEDED TO RETURN
+
     # Clean up dask related stuff
     dask_client.cancel(_temp_peaks) # removes from the dask workers
     del _temp_peaks # deletes the object
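A hedged sketch of the dispatch this patch wires up: passing a dict through the new dask argument selects the 'dc_dask' mode, and the dict's contents are merged into the kwargs handed to the dask detection function. The keys shown are assumptions based on beta_parallel_disk_detection's signature:

    from py4DSTEM.process.diskdetection import find_Bragg_disks

    braggvectors = find_Bragg_disks(
        datacube,
        template=probe_kernel,
        dask={
            'dask_client_params': {'n_workers': 4},  # hypothetical
            'close_dask_client': True,
        },
    )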
From 62263b49ce11e7a44b21ecc60c1c7532ffc252da Mon Sep 17 00:00:00 2001
From: alex rakowski
Date: Tue, 8 Nov 2022 22:40:44 +0000
Subject: [PATCH 07/10] updating find_Bragg_disks_single name

---
 py4DSTEM/process/diskdetection/diskdetection_parallel_new.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/py4DSTEM/process/diskdetection/diskdetection_parallel_new.py b/py4DSTEM/process/diskdetection/diskdetection_parallel_new.py
index be1b8b991..f77ad25c3 100644
--- a/py4DSTEM/process/diskdetection/diskdetection_parallel_new.py
+++ b/py4DSTEM/process/diskdetection/diskdetection_parallel_new.py
@@ -65,7 +65,7 @@ def register_dill_serializer():
 # TODO add ML-AI version
 def _find_Bragg_disks_single_DP_FK_dask_wrapper(arr, *args,**kwargs):
     # This is needed as _find_Bragg_disks_single_DP_FK takes a 2D array, and these arrays have the wrong shape
-    return _find_Bragg_disks_single(arr[0,0], *args, **kwargs)
+    return _find_Bragg_disks_single_DP_FK(arr[0,0], *args, **kwargs)
 
 #### END OF DASK WRAPPER FUNCTIONS ####

From 72567d9da67f1d4e1a5df0f24ca6a60984ca0d97 Mon Sep 17 00:00:00 2001
From: alex rakowski
Date: Tue, 13 Dec 2022 18:45:26 +0000
Subject: [PATCH 08/10] updating old func name

---
 py4DSTEM/process/diskdetection/diskdetection_parallel_new.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/py4DSTEM/process/diskdetection/diskdetection_parallel_new.py b/py4DSTEM/process/diskdetection/diskdetection_parallel_new.py
index f77ad25c3..6210dec66 100644
--- a/py4DSTEM/process/diskdetection/diskdetection_parallel_new.py
+++ b/py4DSTEM/process/diskdetection/diskdetection_parallel_new.py
@@ -11,7 +11,7 @@
 
 from py4DSTEM.io.datastructure.py4dstem import DataCube, QPoints, BraggVectors, PointListArray, PointList
 
-from py4DSTEM.process.diskdetection.diskdetection import _find_Bragg_disks_single_DP_FK
+from py4DSTEM.process.diskdetection.diskdetection import _find_Bragg_disks_single
 from py4DSTEM.io import PointListArray, PointList, datastructure
 import time
@@ -65,7 +65,7 @@ def register_dill_serializer():
 # TODO add ML-AI version
 def _find_Bragg_disks_single_DP_FK_dask_wrapper(arr, *args,**kwargs):
     # This is needed as _find_Bragg_disks_single_DP_FK takes a 2D array, and these arrays have the wrong shape
-    return _find_Bragg_disks_single_DP_FK(arr[0,0], *args, **kwargs)
+    return _find_Bragg_disks_single(arr[0,0], *args, **kwargs)
 
 #### END OF DASK WRAPPER FUNCTIONS ####

From 0844c0dae2df9f20cdf67dd229ca0b9cbf36c8a6 Mon Sep 17 00:00:00 2001
From: alex rakowski
Date: Tue, 13 Dec 2022 18:46:23 +0000
Subject: [PATCH 09/10] updating how to handle dask params

---
 py4DSTEM/io/datastructure/py4dstem/datacube_fns.py |  7 ++++++-
 py4DSTEM/process/diskdetection/diskdetection.py    | 10 ++++++----
 2 files changed, 12 insertions(+), 5 deletions(-)

diff --git a/py4DSTEM/io/datastructure/py4dstem/datacube_fns.py b/py4DSTEM/io/datastructure/py4dstem/datacube_fns.py
index ccc4edccb..2050c4f94 100644
--- a/py4DSTEM/io/datastructure/py4dstem/datacube_fns.py
+++ b/py4DSTEM/io/datastructure/py4dstem/datacube_fns.py
@@ -777,12 +777,15 @@ def find_Bragg_disks(
     CUDA = False,
     CUDA_batched = True,
     distributed = None,
-    dask = None,
+    dask = True,
+    dask_params = None,
 
     _qt_progress_bar = None,
 
     name = 'braggvectors',
     returncalc = True,
+
+    **kwargs
     ):
     """
     Finds the Bragg disks by cross correlation with `template`.
@@ -927,8 +930,10 @@ def find_Bragg_disks(
        CUDA_batched = CUDA_batched,
         distributed = distributed,
         dask = dask,
+        dask_params = dask_params,
 
         _qt_progress_bar = _qt_progress_bar,
+        **kwargs
     )

diff --git a/py4DSTEM/process/diskdetection/diskdetection.py b/py4DSTEM/process/diskdetection/diskdetection.py
index dbae8d576..c6deb8977 100644
--- a/py4DSTEM/process/diskdetection/diskdetection.py
+++ b/py4DSTEM/process/diskdetection/diskdetection.py
@@ -36,7 +36,8 @@ def find_Bragg_disks(
     CUDA = False,
     CUDA_batched = True,
     distributed = None,
-    dask = None,
+    dask : bool = False,
+    dask_params : dict = None,
 
     _qt_progress_bar = None,
     **kws):
@@ -204,7 +205,7 @@ def find_Bragg_disks(
                 mode = 'dc_GPU'
             else:
                 mode = 'dc_GPU_batched'
-        elif dask is not None:
+        elif dask:
             mode = 'dc_dask'
         else:
             x = _parse_distributed(distributed)
@@ -233,8 +234,8 @@ def find_Bragg_disks(
         kws['data_file'] = data_file
         kws['cluster_path'] = cluster_path
     # dask kwargs
-    if dask is not None:
-        kws.update(dask)
+    if dask_params is not None:
+        kws.update(dask_params)
 
     # run and return
     ans = fn(
@@ -740,6 +741,7 @@ def _parse_distributed(distributed):
 
     elif "dask" in distributed:
         mode = 'dask'
+        print(type(distributed))
         if "client" in distributed["dask"]:
             connect = distributed["dask"]["client"]
         else:
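With the switch and its parameters now separated, the datacube-level call might read as follows; a sketch, with the dask_params keys again assumed from beta_parallel_disk_detection:

    # dask=True selects the dask code path; dask_params carries its options
    braggvectors = datacube.find_Bragg_disks(
        template=probe_kernel,
        dask=True,
        dask_params={
            'dask_client_params': {'n_workers': 4},  # hypothetical
            'close_dask_client': True,
        },
    )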
From b5d7e2e49122813bd50aacbce79e352d6e2df4b6 Mon Sep 17 00:00:00 2001
From: Ben Savitzky
Date: Tue, 13 Dec 2022 11:08:34 -0800
Subject: [PATCH 10/10] adds test files for dask

---
 py4DSTEM/test/dask/diskdetection.py | 15 +++++++++++++++
 py4DSTEM/test/dask/io.py            | 11 +++++++++++
 py4DSTEM/test/dask/virtualimage.py  | 11 +++++++++++
 3 files changed, 37 insertions(+)
 create mode 100644 py4DSTEM/test/dask/diskdetection.py
 create mode 100644 py4DSTEM/test/dask/io.py
 create mode 100644 py4DSTEM/test/dask/virtualimage.py

diff --git a/py4DSTEM/test/dask/diskdetection.py b/py4DSTEM/test/dask/diskdetection.py
new file mode 100644
index 000000000..c91ed52b4
--- /dev/null
+++ b/py4DSTEM/test/dask/diskdetection.py
@@ -0,0 +1,15 @@
+# Test dask disk detection functionality
+
+# Device use cases:
+# - local machine
+# - cluster
+
+# Storage use cases:
+# - as dask array
+# - as mem map
+# - in RAM
+
+# Future cases:
+# - GPU + dask
+
+

diff --git a/py4DSTEM/test/dask/io.py b/py4DSTEM/test/dask/io.py
new file mode 100644
index 000000000..5aca1e131
--- /dev/null
+++ b/py4DSTEM/test/dask/io.py
@@ -0,0 +1,11 @@
+# Test dask i/o functionality
+
+# Cases:
+# - load a datacube "normally", i.e. into memory, and then convert it to a dask array
+# - load a datacube directly from .h5 to a mapped dask array
+# - load a datacube into a numpy memmap, and then work on that as a dask array
+
+
+
+
+

diff --git a/py4DSTEM/test/dask/virtualimage.py b/py4DSTEM/test/dask/virtualimage.py
new file mode 100644
index 000000000..45dcee431
--- /dev/null
+++ b/py4DSTEM/test/dask/virtualimage.py
@@ -0,0 +1,11 @@
+# Test virtual imaging with dask
+
+
+# Do speed testing
+
+
+# Use cases to test:
+# - no center shifting
+# - center shifting
+
+
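As a starting point for the io.py stub, a hedged sketch of its second case, loading a datacube straight into a mapped dask array; the test file path is a placeholder:

    import py4DSTEM

    # hypothetical test file
    fp = 'test_data_v0_12.h5'

    # case 2: load directly from .h5 to a lazily mapped dask array
    datacube = py4DSTEM.io.read(fp, mem='DASK')

    # cheap smoke test: computing one pattern should touch only one chunk
    dp = datacube.data[0, 0].compute()
    assert dp.ndim == 2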