diff --git a/pychunkedgraph/graph/cache.py b/pychunkedgraph/graph/cache.py
index 13fa962ae..2cde01723 100644
--- a/pychunkedgraph/graph/cache.py
+++ b/pychunkedgraph/graph/cache.py
@@ -79,7 +79,7 @@ def cross_edges_decorated(node_id):
         return cross_edges_decorated(node_id)
 
     def parents_multiple(self, node_ids: np.ndarray, *, time_stamp: datetime = None):
-        node_ids = np.array(node_ids, dtype=NODE_ID, copy=False)
+        node_ids = np.asarray(node_ids, dtype=NODE_ID)
         if not node_ids.size:
             return node_ids
         mask = np.in1d(node_ids, np.fromiter(self.parents_cache.keys(), dtype=NODE_ID))
@@ -93,7 +93,7 @@ def parents_multiple(self, node_ids: np.ndarray, *, time_stamp: datetime = None)
 
     def children_multiple(self, node_ids: np.ndarray, *, flatten=False):
         result = {}
-        node_ids = np.array(node_ids, dtype=NODE_ID, copy=False)
+        node_ids = np.asarray(node_ids, dtype=NODE_ID)
         if not node_ids.size:
             return result
         mask = np.in1d(node_ids, np.fromiter(self.children_cache.keys(), dtype=NODE_ID))
@@ -111,7 +111,7 @@ def cross_chunk_edges_multiple(
         self, node_ids: np.ndarray, *, time_stamp: datetime = None
     ):
         result = {}
-        node_ids = np.array(node_ids, dtype=NODE_ID, copy=False)
+        node_ids = np.asarray(node_ids, dtype=NODE_ID)
         if not node_ids.size:
             return result
         mask = np.in1d(
diff --git a/pychunkedgraph/graph/chunkedgraph.py b/pychunkedgraph/graph/chunkedgraph.py
index 1754315d8..c298e3265 100644
--- a/pychunkedgraph/graph/chunkedgraph.py
+++ b/pychunkedgraph/graph/chunkedgraph.py
@@ -1,5 +1,4 @@
 # pylint: disable=invalid-name, missing-docstring, too-many-lines, import-outside-toplevel, unsupported-binary-operation
-
 import time
 import typing
 import datetime
@@ -765,6 +764,7 @@ def add_edges(
         source_coords: typing.Sequence[int] = None,
         sink_coords: typing.Sequence[int] = None,
         allow_same_segment_merge: typing.Optional[bool] = False,
+        stitch_mode: typing.Optional[bool] = False,
     ) -> operation.GraphEditOperation.Result:
         """
         Adds an edge to the chunkedgraph
@@ -781,6 +781,7 @@ def add_edges(
             source_coords=source_coords,
             sink_coords=sink_coords,
             allow_same_segment_merge=allow_same_segment_merge,
+            stitch_mode=stitch_mode,
         ).execute()
 
     def remove_edges(
@@ -911,7 +912,7 @@ def get_chunk_coordinates_multiple(self, node_or_chunk_ids: typing.Sequence):
             node_or_chunk_ids, dtype=basetypes.NODE_ID, copy=False
         )
         layers = self.get_chunk_layers(node_or_chunk_ids)
-        assert np.all(layers == layers[0]), "All IDs must have the same layer."
+        assert len(layers) == 0 or np.all(layers == layers[0]), "All IDs must have the same layer."
         return chunk_utils.get_chunk_coordinates_multiple(self.meta, node_or_chunk_ids)
 
     def get_chunk_id(
diff --git a/pychunkedgraph/graph/chunks/utils.py b/pychunkedgraph/graph/chunks/utils.py
index f22a4d84a..d479fd03e 100644
--- a/pychunkedgraph/graph/chunks/utils.py
+++ b/pychunkedgraph/graph/chunks/utils.py
@@ -98,7 +98,7 @@ def get_chunk_coordinates_multiple(meta, ids: np.ndarray) -> np.ndarray:
     y_offset = x_offset - bits_per_dim
     z_offset = y_offset - bits_per_dim
 
-    ids = np.array(ids, dtype=int, copy=False)
+    ids = np.asarray(ids, dtype=int)
     X = ids >> x_offset & 2**bits_per_dim - 1
     Y = ids >> y_offset & 2**bits_per_dim - 1
     Z = ids >> z_offset & 2**bits_per_dim - 1
@@ -152,7 +152,7 @@ def get_chunk_ids_from_node_ids(meta, ids: Iterable[np.uint64]) -> np.ndarray:
     bits_per_dims = np.array([meta.bitmasks[l] for l in get_chunk_layers(meta, ids)])
     offsets = 64 - meta.graph_config.LAYER_ID_BITS - 3 * bits_per_dims
 
-    ids = np.array(ids, dtype=int, copy=False)
+    ids = np.asarray(ids, dtype=int)
     cids1 = np.array((ids >> offsets) << offsets, dtype=np.uint64)
     # cids2 = np.vectorize(get_chunk_id)(meta, ids)
     # assert np.all(cids1 == cids2)
diff --git a/pychunkedgraph/graph/edges/__init__.py b/pychunkedgraph/graph/edges/__init__.py
index 3574f9b6c..017e48002 100644
--- a/pychunkedgraph/graph/edges/__init__.py
+++ b/pychunkedgraph/graph/edges/__init__.py
@@ -54,22 +54,20 @@ def __init__(
         affinities: Optional[np.ndarray] = None,
         areas: Optional[np.ndarray] = None,
     ):
-        self.node_ids1 = np.array(node_ids1, dtype=basetypes.NODE_ID, copy=False)
-        self.node_ids2 = np.array(node_ids2, dtype=basetypes.NODE_ID, copy=False)
+        self.node_ids1 = np.array(node_ids1, dtype=basetypes.NODE_ID)
+        self.node_ids2 = np.array(node_ids2, dtype=basetypes.NODE_ID)
         assert self.node_ids1.size == self.node_ids2.size
         self._as_pairs = None
 
         if affinities is not None and len(affinities) > 0:
-            self._affinities = np.array(
-                affinities, dtype=basetypes.EDGE_AFFINITY, copy=False
-            )
+            self._affinities = np.array(affinities, dtype=basetypes.EDGE_AFFINITY)
             assert self.node_ids1.size == self._affinities.size
         else:
             self._affinities = np.full(len(self.node_ids1), DEFAULT_AFFINITY)
 
         if areas is not None and len(areas) > 0:
-            self._areas = np.array(areas, dtype=basetypes.EDGE_AREA, copy=False)
+            self._areas = np.array(areas, dtype=basetypes.EDGE_AREA)
             assert self.node_ids1.size == self._areas.size
         else:
             self._areas = np.full(len(self.node_ids1), DEFAULT_AREA)
diff --git a/pychunkedgraph/graph/edits.py b/pychunkedgraph/graph/edits.py
index 4e66a12d3..3c1756575 100644
--- a/pychunkedgraph/graph/edits.py
+++ b/pychunkedgraph/graph/edits.py
@@ -68,7 +68,9 @@ def _analyze_affected_edges(
 
 
 def _get_relevant_components(edges: np.ndarray, supervoxels: np.ndarray) -> Tuple:
-    edges = np.concatenate([edges, np.vstack([supervoxels, supervoxels]).T])
+    edges = np.concatenate([edges, np.vstack([supervoxels, supervoxels]).T]).astype(
+        basetypes.NODE_ID
+    )
     graph, _, _, graph_ids = flatgraph.build_gt_graph(edges, make_directed=True)
     ccs = flatgraph.connected_components(graph)
     relevant_ccs = []
@@ -107,8 +109,10 @@ def merge_preprocess(
         active_edges.append(active)
         inactive_edges.append(inactive)
 
-    relevant_ccs = _get_relevant_components(np.concatenate(active_edges), supervoxels)
-    inactive = np.concatenate(inactive_edges)
+    relevant_ccs = _get_relevant_components(
+        np.concatenate(active_edges).astype(basetypes.NODE_ID), supervoxels
+    )
+    inactive = np.concatenate(inactive_edges).astype(basetypes.NODE_ID)
     _inactive = [types.empty_2d]
     # source to sink edges
     source_mask = np.in1d(inactive[:, 0], relevant_ccs[0])
@@ -119,7 +123,7 @@ def merge_preprocess(
     sink_mask = np.in1d(inactive[:, 1], relevant_ccs[0])
     source_mask = np.in1d(inactive[:, 0], relevant_ccs[1])
     _inactive.append(inactive[source_mask & sink_mask])
-    _inactive = np.concatenate(_inactive)
+    _inactive = np.concatenate(_inactive).astype(basetypes.NODE_ID)
     return np.unique(_inactive, axis=0) if _inactive.size else types.empty_2d
 
 
@@ -187,14 +191,15 @@ def add_edges(
     time_stamp: datetime.datetime = None,
     parent_ts: datetime.datetime = None,
     allow_same_segment_merge=False,
+    stitch_mode: bool = False,
 ):
     edges, l2_cross_edges_d = _analyze_affected_edges(
         cg, atomic_edges, parent_ts=parent_ts
     )
     l2ids = np.unique(edges)
-    if not allow_same_segment_merge:
+    if not allow_same_segment_merge and not stitch_mode:
         roots = cg.get_roots(l2ids, assert_roots=True, time_stamp=parent_ts)
-        assert np.unique(roots).size == 2, "L2 IDs must belong to different roots."
+        assert np.unique(roots).size >= 2, "L2 IDs must belong to different roots."
 
     new_old_id_d = defaultdict(set)
     old_new_id_d = defaultdict(set)
@@ -217,7 +222,9 @@ def add_edges(
 
         # update cache
         # map parent to new merged children and vice versa
-        merged_children = np.concatenate([atomic_children_d[l2id] for l2id in l2ids_])
+        merged_children = np.concatenate(
+            [atomic_children_d[l2id] for l2id in l2ids_]
+        ).astype(basetypes.NODE_ID)
         cg.cache.children_cache[new_id] = merged_children
         cache_utils.update(cg.cache.parents_cache, merged_children, new_id)
 
@@ -244,6 +251,7 @@ def add_edges(
         operation_id=operation_id,
         time_stamp=time_stamp,
         parent_ts=parent_ts,
+        stitch_mode=stitch_mode,
     )
 
     new_roots = create_parents.run()
@@ -285,9 +293,8 @@ def _split_l2_agglomeration(
     cross_edges = cross_edges[~in2d(cross_edges, removed_edges)]
     isolated_ids = agg.supervoxels[~np.in1d(agg.supervoxels, chunk_edges)]
     isolated_edges = np.column_stack((isolated_ids, isolated_ids))
-    graph, _, _, graph_ids = flatgraph.build_gt_graph(
-        np.concatenate([chunk_edges, isolated_edges]), make_directed=True
-    )
+    _edges = np.concatenate([chunk_edges, isolated_edges]).astype(basetypes.NODE_ID)
+    graph, _, _, graph_ids = flatgraph.build_gt_graph(_edges, make_directed=True)
     return flatgraph.connected_components(graph), graph_ids, cross_edges
 
 
@@ -331,7 +338,7 @@ def remove_edges(
     old_hierarchy_d = _init_old_hierarchy(cg, l2ids, parent_ts=parent_ts)
     chunk_id_map = dict(zip(l2ids.tolist(), cg.get_chunk_ids_from_node_ids(l2ids)))
 
-    removed_edges = np.concatenate([atomic_edges, atomic_edges[:, ::-1]], axis=0)
+    removed_edges = np.concatenate([atomic_edges, atomic_edges[:, ::-1]], axis=0).astype(basetypes.NODE_ID)
     new_l2_ids = []
     for id_ in l2ids:
         agg = l2id_agglomeration_d[id_]
@@ -387,11 +394,11 @@ def _get_flipped_ids(id_map, node_ids):
     returns old or new ids according to the map
     """
     ids = [
-        np.array(list(id_map[id_]), dtype=basetypes.NODE_ID, copy=False)
+        np.asarray(list(id_map[id_]), dtype=basetypes.NODE_ID)
         for id_ in node_ids
     ]
     ids.append(types.empty_1d)  # concatenate needs at least one array
-    return np.concatenate(ids)
+    return np.concatenate(ids).astype(basetypes.NODE_ID)
 
 
 def _get_descendants(cg, new_id):
@@ -443,7 +450,7 @@ def _update_neighbor_cross_edges_single(
     edges = fastremap.remap(edges, node_map, preserve_missing_labels=True)
     if layer == counterpart_layer:
         reverse_edge = np.array([counterpart, new_id], dtype=basetypes.NODE_ID)
-        edges = np.concatenate([edges, [reverse_edge]])
+        edges = np.concatenate([edges, [reverse_edge]]).astype(basetypes.NODE_ID)
     descendants = _get_descendants(cg, new_id)
     mask = np.isin(edges[:, 1], descendants)
     if np.any(mask):
@@ -510,6 +517,7 @@ def __init__(
         old_new_id_d: Dict[np.uint64, Set[np.uint64]] = None,
         old_hierarchy_d: Dict[np.uint64, Dict[int, np.uint64]] = None,
         parent_ts: datetime.datetime = None,
+        stitch_mode: bool = False,
     ):
         self.cg = cg
         self.new_entries = []
@@ -521,6 +529,7 @@ def __init__(
         self._operation_id = operation_id
         self._time_stamp = time_stamp
         self._last_successful_ts = parent_ts
+        self.stitch_mode = stitch_mode
 
     def _update_id_lineage(
         self,
@@ -552,7 +561,7 @@ def _get_connected_components(self, node_ids: np.ndarray, layer: int):
         for id_ in node_ids:
             edges_ = cross_edges_d[id_].get(layer, types.empty_2d)
             cx_edges.append(edges_)
-        cx_edges = np.concatenate([*cx_edges, np.vstack([node_ids, node_ids]).T])
+        cx_edges = np.concatenate([*cx_edges, np.vstack([node_ids, node_ids]).T]).astype(basetypes.NODE_ID)
         graph, _, _, graph_ids = flatgraph.build_gt_graph(cx_edges, make_directed=True)
         return flatgraph.connected_components(graph), graph_ids
 
@@ -568,7 +577,7 @@ def _get_layer_node_ids(
         mask = np.in1d(siblings, old_ids)
         node_ids = np.concatenate(
             [_get_flipped_ids(self._old_new_id_d, old_ids), siblings[~mask], new_ids]
-        )
+        ).astype(basetypes.NODE_ID)
         node_ids = np.unique(node_ids)
         layer_mask = self.cg.get_chunk_layers(node_ids) == layer
         return node_ids[layer_mask]
@@ -635,10 +644,16 @@ def _create_new_parents(self, layer: int):
                 if len(cx_edges_d[cc_ids[0]].get(l, types.empty_2d)) > 0:
                     parent_layer = l
                     break
-            parent = self.cg.id_client.create_node_id(
-                self.cg.get_parent_chunk_id(cc_ids[0], parent_layer),
-                root_chunk=parent_layer == self.cg.meta.layer_count,
-            )
+
+            while True:
+                parent = self.cg.id_client.create_node_id(
+                    self.cg.get_parent_chunk_id(cc_ids[0], parent_layer),
+                    root_chunk=parent_layer == self.cg.meta.layer_count,
+                )
+                _entry = self.cg.client.read_node(parent)
+                if _entry == {}:
+                    break
+
             self._new_ids_d[parent_layer].append(parent)
             self._update_id_lineage(parent, cc_ids, layer, parent_layer)
             self.cg.cache.children_cache[parent] = cc_ids
@@ -689,6 +704,8 @@ def run(self) -> Iterable:
         return self._new_ids_d[self.cg.meta.layer_count]
 
     def _update_root_id_lineage(self):
+        if self.stitch_mode:
+            return
         new_roots = self._new_ids_d[self.cg.meta.layer_count]
         former_roots = _get_flipped_ids(self._new_old_id_d, new_roots)
         former_roots = np.unique(former_roots)
diff --git a/pychunkedgraph/graph/operation.py b/pychunkedgraph/graph/operation.py
index 8c5d4484e..a07045cff 100644
--- a/pychunkedgraph/graph/operation.py
+++ b/pychunkedgraph/graph/operation.py
@@ -555,6 +555,7 @@ class MergeOperation(GraphEditOperation):
         "affinities",
         "bbox_offset",
         "allow_same_segment_merge",
+        "stitch_mode",
     ]
 
     def __init__(
@@ -568,6 +569,7 @@ def __init__(
         bbox_offset: Tuple[int, int, int] = (240, 240, 24),
        affinities: Optional[Sequence[np.float32]] = None,
         allow_same_segment_merge: Optional[bool] = False,
+        stitch_mode: bool = False,
     ) -> None:
         super().__init__(
             cg, user_id=user_id, source_coords=source_coords, sink_coords=sink_coords
@@ -575,6 +577,7 @@ def __init__(
         self.added_edges = np.atleast_2d(added_edges).astype(basetypes.NODE_ID)
         self.bbox_offset = np.atleast_1d(bbox_offset).astype(basetypes.COORDINATES)
         self.allow_same_segment_merge = allow_same_segment_merge
+        self.stitch_mode = stitch_mode
 
         self.affinities = None
         if affinities is not None:
@@ -605,34 +608,43 @@ def _apply(
             )
         )
         if len(root_ids) < 2 and not self.allow_same_segment_merge:
-            raise PreconditionError("Supervoxels must belong to different objects.")
-        bbox = get_bbox(self.source_coords, self.sink_coords, self.bbox_offset)
-        with TimeIt("subgraph", self.cg.graph_id, operation_id):
-            edges = self.cg.get_subgraph(
-                root_ids,
-                bbox=bbox,
-                bbox_is_coordinate=True,
-                edges_only=True,
+            raise PreconditionError(
+                "Supervoxels must belong to different objects."
+                f" Tried to merge {self.added_edges.ravel()},"
+                f" which all belong to {tuple(root_ids)[0]}."
             )
-        if self.allow_same_segment_merge:
-            inactive_edges = types.empty_2d
-        else:
-            with TimeIt("preprocess", self.cg.graph_id, operation_id):
-                inactive_edges = edits.merge_preprocess(
-                    self.cg,
-                    subgraph_edges=edges,
-                    supervoxels=self.added_edges.ravel(),
-                    parent_ts=self.parent_ts,
+        atomic_edges = self.added_edges
+        fake_edge_rows = []
+        if not self.stitch_mode:
+            bbox = get_bbox(self.source_coords, self.sink_coords, self.bbox_offset)
+            with TimeIt("subgraph", self.cg.graph_id, operation_id):
+                edges = self.cg.get_subgraph(
+                    root_ids,
+                    bbox=bbox,
+                    bbox_is_coordinate=True,
+                    edges_only=True,
                 )
-        atomic_edges, fake_edge_rows = edits.check_fake_edges(
-            self.cg,
-            atomic_edges=self.added_edges,
-            inactive_edges=inactive_edges,
-            time_stamp=timestamp,
-            parent_ts=self.parent_ts,
-        )
+            if self.allow_same_segment_merge:
+                inactive_edges = types.empty_2d
+            else:
+                with TimeIt("preprocess", self.cg.graph_id, operation_id):
+                    inactive_edges = edits.merge_preprocess(
+                        self.cg,
+                        subgraph_edges=edges,
+                        supervoxels=self.added_edges.ravel(),
+                        parent_ts=self.parent_ts,
+                    )
+
+            atomic_edges, fake_edge_rows = edits.check_fake_edges(
+                self.cg,
+                atomic_edges=self.added_edges,
+                inactive_edges=inactive_edges,
+                time_stamp=timestamp,
+                parent_ts=self.parent_ts,
+            )
+
         with TimeIt("add_edges", self.cg.graph_id, operation_id):
             new_roots, new_l2_ids, new_entries = edits.add_edges(
                 self.cg,
@@ -641,6 +653,7 @@ def _apply(
                 time_stamp=timestamp,
                 parent_ts=self.parent_ts,
                 allow_same_segment_merge=self.allow_same_segment_merge,
+                stitch_mode=self.stitch_mode,
             )
         return new_roots, new_l2_ids, fake_edge_rows + new_entries
 
@@ -857,12 +870,14 @@ def __init__(
                 "try placing the points further apart."
             )
 
-        ids = np.concatenate([self.source_ids, self.sink_ids])
+        ids = np.concatenate([self.source_ids, self.sink_ids]).astype(basetypes.NODE_ID)
         layers = self.cg.get_chunk_layers(ids)
         assert np.sum(layers) == layers.size, "IDs must be supervoxels."
 
     def _update_root_ids(self) -> np.ndarray:
-        sink_and_source_ids = np.concatenate((self.source_ids, self.sink_ids))
+        sink_and_source_ids = np.concatenate((self.source_ids, self.sink_ids)).astype(
+            basetypes.NODE_ID
+        )
         root_ids = np.unique(
             self.cg.get_roots(
                 sink_and_source_ids, assert_roots=True, time_stamp=self.parent_ts
@@ -878,7 +893,9 @@ def _apply(
         # Verify that sink and source are from the same root object
         root_ids = set(
             self.cg.get_roots(
-                np.concatenate([self.source_ids, self.sink_ids]),
+                np.concatenate([self.source_ids, self.sink_ids]).astype(
+                    basetypes.NODE_ID
+                ),
                 assert_roots=True,
                 time_stamp=self.parent_ts,
             )
         )
@@ -899,7 +916,7 @@ def _apply(
         edges = reduce(lambda x, y: x + y, edges_tuple, Edges([], []))
         supervoxels = np.concatenate(
             [agg.supervoxels for agg in l2id_agglomeration_d.values()]
-        )
+        ).astype(basetypes.NODE_ID)
         mask0 = np.in1d(edges.node_ids1, supervoxels)
         mask1 = np.in1d(edges.node_ids2, supervoxels)
         edges = edges[mask0 & mask1]
diff --git a/pychunkedgraph/graph/utils/serializers.py b/pychunkedgraph/graph/utils/serializers.py
index 09c0f63b0..3b0101d86 100644
--- a/pychunkedgraph/graph/utils/serializers.py
+++ b/pychunkedgraph/graph/utils/serializers.py
@@ -41,7 +41,9 @@ def _deserialize(val, dtype, shape=None, order=None):
 
     def __init__(self, dtype, shape=None, order=None, compression_level=None):
         super().__init__(
-            serializer=lambda x: x.newbyteorder(dtype.byteorder).tobytes(),
+            serializer=lambda x: x.view(
+                x.dtype.newbyteorder(dtype.byteorder)
+            ).tobytes(),
             deserializer=lambda x: NumPyArray._deserialize(
                 x, dtype, shape=shape, order=order
             ),
@@ -53,7 +55,9 @@ def __init__(self, dtype, shape=None, order=None, compression_level=None):
 class NumPyValue(_Serializer):
     def __init__(self, dtype):
         super().__init__(
-            serializer=lambda x: x.newbyteorder(dtype.byteorder).tobytes(),
+            serializer=lambda x: x.view(
+                x.dtype.newbyteorder(dtype.byteorder)
+            ).tobytes(),
             deserializer=lambda x: np.frombuffer(x, dtype=dtype)[0],
             basetype=dtype.type,
         )
@@ -96,7 +100,7 @@ def __init__(self):
 
 
 def pad_node_id(node_id: np.uint64) -> str:
-    """ Pad node id to 20 digits
+    """Pad node id to 20 digits
 
     :param node_id: int
     :return: str
@@ -105,7 +109,7 @@ def pad_node_id(node_id: np.uint64) -> str:
 
 
 def serialize_uint64(node_id: np.uint64, counter=False, fake_edges=False) -> bytes:
-    """ Serializes an id to be ingested by a bigtable table row
+    """Serializes an id to be ingested by a bigtable table row
 
     :param node_id: int
     :return: str
@@ -118,7 +122,7 @@ def serialize_uint64(node_id: np.uint64, counter=False, fake_edges=False) -> byt
 
 
 def serialize_uint64s_to_regex(node_ids: Iterable[np.uint64]) -> bytes:
-    """ Serializes an id to be ingested by a bigtable table row
+    """Serializes an id to be ingested by a bigtable table row
 
     :param node_id: int
     :return: str
@@ -128,7 +132,7 @@ def deserialize_uint64(node_id: bytes, fake_edges=False) -> np.uint64:
-    """ De-serializes a node id from a BigTable row
+    """De-serializes a node id from a BigTable row
 
     :param node_id: bytes
     :return: np.uint64
@@ -139,7 +143,7 @@ def serialize_key(key: str) -> bytes:
-    """ Serializes a key to be ingested by a bigtable table row
+    """Serializes a key to be ingested by a bigtable table row
 
     :param key: str
     :return: bytes
@@ -148,7 +152,7 @@ def deserialize_key(key: bytes) -> str:
-    """ Deserializes a row key
+    """Deserializes a row key
 
     :param key: bytes
     :return: str
diff --git a/pychunkedgraph/ingest/create/atomic_layer.py b/pychunkedgraph/ingest/create/atomic_layer.py
index 0a7aae728..e235d36d4 100644
--- a/pychunkedgraph/ingest/create/atomic_layer.py
+++ b/pychunkedgraph/ingest/create/atomic_layer.py
@@ -68,8 +68,10 @@ def _get_chunk_nodes_and_edges(chunk_edges_d: dict, isolated_ids: Sequence[int])
     in-chunk edges and nodes_ids
     """
     isolated_nodes_self_edges = np.vstack([isolated_ids, isolated_ids]).T
-    node_ids = [isolated_ids]
-    edge_ids = [isolated_nodes_self_edges]
+    node_ids = [isolated_ids] if len(isolated_ids) != 0 else []
+    edge_ids = (
+        [isolated_nodes_self_edges] if len(isolated_nodes_self_edges) != 0 else []
+    )
     for edge_type in EDGE_TYPES:
         edges = chunk_edges_d[edge_type]
         node_ids.append(edges.node_ids1)
@@ -77,9 +79,9 @@ def _get_chunk_nodes_and_edges(chunk_edges_d: dict, isolated_ids: Sequence[int])
         node_ids.append(edges.node_ids2)
         edge_ids.append(edges.get_pairs())
 
-    chunk_node_ids = np.unique(np.concatenate(node_ids))
+    chunk_node_ids = np.unique(np.concatenate(node_ids).astype(basetypes.NODE_ID))
     edge_ids.append(np.vstack([chunk_node_ids, chunk_node_ids]).T)
-    return (chunk_node_ids, np.concatenate(edge_ids))
+    return (chunk_node_ids, np.concatenate(edge_ids).astype(basetypes.NODE_ID))
 
 
 def _get_remapping(chunk_edges_d: dict):
@@ -116,7 +118,7 @@ def _process_component(
         r_key = serializers.serialize_uint64(node_id)
         nodes.append(cg.client.mutate_row(r_key, val_dict, time_stamp=time_stamp))
 
-    chunk_out_edges = np.concatenate(chunk_out_edges)
+    chunk_out_edges = np.concatenate(chunk_out_edges).astype(basetypes.NODE_ID)
     cce_layers = cg.get_cross_chunk_edges_layer(chunk_out_edges)
     u_cce_layers = np.unique(cce_layers)
 
@@ -147,5 +149,7 @@ def _get_outgoing_edges(node_id, chunk_edges_d, sparse_indices, remapping):
         ]
         row_ids = row_ids[column_ids == 0]
         # edges that this node is part of
-        chunk_out_edges = np.concatenate([chunk_out_edges, edges[row_ids]])
+        chunk_out_edges = np.concatenate([chunk_out_edges, edges[row_ids]]).astype(
+            basetypes.NODE_ID
+        )
     return chunk_out_edges
diff --git a/pychunkedgraph/ingest/create/parent_layer.py b/pychunkedgraph/ingest/create/parent_layer.py
index 90b24d26a..dfdb48dac 100644
--- a/pychunkedgraph/ingest/create/parent_layer.py
+++ b/pychunkedgraph/ingest/create/parent_layer.py
@@ -73,7 +73,7 @@ def _read_children_chunks(
         children_ids = [types.empty_1d]
         for child_coord in children_coords:
            children_ids.append(_read_chunk([], cg, layer_id - 1, child_coord))
-        return np.concatenate(children_ids)
+        return np.concatenate(children_ids).astype(basetypes.NODE_ID)
 
     with mp.Manager() as manager:
         children_ids_shared = manager.list()
@@ -92,7 +92,7 @@ def _read_children_chunks(
             multi_args,
             n_threads=min(len(multi_args), mp.cpu_count()),
         )
-    return np.concatenate(children_ids_shared)
+    return np.concatenate(children_ids_shared).astype(basetypes.NODE_ID)
 
 
 def _read_chunk_helper(args):
diff --git a/pychunkedgraph/ingest/ran_agglomeration.py b/pychunkedgraph/ingest/ran_agglomeration.py
index a0ca42d54..d726ba4a5 100644
--- a/pychunkedgraph/ingest/ran_agglomeration.py
+++ b/pychunkedgraph/ingest/ran_agglomeration.py
@@ -314,7 +314,9 @@ def get_active_edges(edges_d, mapping):
         if edge_type == EDGE_TYPES.in_chunk:
             pseudo_isolated_ids.append(edges.node_ids2)
 
-    return chunk_edges_active, np.unique(np.concatenate(pseudo_isolated_ids))
+    return chunk_edges_active, np.unique(
+        np.concatenate(pseudo_isolated_ids).astype(basetypes.NODE_ID)
+    )
 
 
 def define_active_edges(edge_dict, mapping) -> Union[Dict, np.ndarray]:
@@ -380,7 +382,7 @@ def read_raw_agglomeration_data(imanager: IngestionManager, chunk_coord: np.ndar
     edges_list = _read_agg_files(filenames, chunk_ids, path)
 
     G = nx.Graph()
-    G.add_edges_from(np.concatenate(edges_list))
+    G.add_edges_from(np.concatenate(edges_list).astype(basetypes.NODE_ID))
     mapping = {}
     components = list(nx.connected_components(G))
     for i_cc, cc in enumerate(components):
diff --git a/pychunkedgraph/meshing/meshgen.py b/pychunkedgraph/meshing/meshgen.py
index a8da89b1f..1fd4cf5b4 100644
--- a/pychunkedgraph/meshing/meshgen.py
+++ b/pychunkedgraph/meshing/meshgen.py
@@ -935,6 +935,7 @@ def chunk_initial_mesh_task(
     cv = CloudVolume(
         f"graphene://https://localhost/segmentation/table/dummy",
         info=meshgen_utils.get_json_info(cg),
+        secrets={"token": "dummy"},
     )
     sharding_info = cv.mesh.meta.info["sharding"]["2"]
     sharding_spec = ShardingSpecification.from_dict(sharding_info)
@@ -1123,6 +1124,7 @@ def chunk_stitch_remeshing_task(
         f"graphene://https://localhost/segmentation/table/dummy",
         mesh_dir=cv_sharded_mesh_dir,
         info=meshgen_utils.get_json_info(cg),
+        secrets={"token": "dummy"},
     )
 
     fragments_in_batch_processed = 0
@@ -1257,6 +1259,7 @@ def chunk_initial_sharded_stitching_task(
     cv = CloudVolume(
         f"graphene://https://localhost/segmentation/table/dummy",
         info=meshgen_utils.get_json_info(cg),
+        secrets={"token": "dummy"},
    )
     shard_filenames = []
     shard_to_chunk_id = {}
diff --git a/requirements.in b/requirements.in
index 4fcd353ed..bf735af22 100644
--- a/requirements.in
+++ b/requirements.in
@@ -18,15 +18,15 @@ werkzeug
 tensorstore
 
 # PyPI only:
-cloud-files>=4.21.1
-cloud-volume>=8.26.0
+cloud-files>=5.3.0
+cloud-volume>=12.2.0
 multiwrapper
 middle-auth-client>=3.11.0
 zmesh>=1.7.0
 fastremap>=1.14.0
 task-queue>=2.13.0
 messagingclient
-dracopy>=1.3.0
+dracopy>=1.5.0
 datastoreflex>=0.5.0
 zstandard==0.21.0
diff --git a/requirements.txt b/requirements.txt
index 0eedacb31..35014d4de 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -41,22 +41,18 @@ click==8.1.7
     #   -r requirements.in
     #   cloud-files
     #   compressed-segmentation
-    #   compresso
     #   flask
+    #   microviewer
     #   rq
     #   task-queue
-cloud-files==4.21.1
+cloud-files==5.3.0
     # via
     #   -r requirements.in
     #   cloud-volume
     #   datastoreflex
-cloud-volume==8.26.0
+cloud-volume==12.2.0
     # via -r requirements.in
-compressed-segmentation==2.2.1
-    # via cloud-volume
-compresso==3.2.1
-    # via cloud-volume
-crackle-codec==0.7.0
+compressed-segmentation==2.3.2
     # via cloud-volume
 crc32c==2.3.post0
     # via cloud-files
 dill==0.3.7
     # via
     #   multiprocess
     #   pathos
-dracopy==1.3.0
+dracopy==1.5.0
     # via
     #   -r requirements.in
     #   cloud-volume
@@ -78,7 +74,7 @@ fastremap==1.14.0
     # via
     #   -r requirements.in
     #   cloud-volume
-    #   crackle-codec
+    #   osteoid
 flask==2.3.3
     # via
     #   -r requirements.in
     #   middle-auth-client
 flask-cors==4.0.0
     # via -r requirements.in
-fpzip==1.2.2
-    # via cloud-volume
 furl==2.1.3
     # via middle-auth-client
 gevent==23.9.1
@@ -189,9 +183,11 @@ markupsafe==2.1.3
     #   werkzeug
 messagingclient==0.1.3
     # via -r requirements.in
+microviewer==1.13.1
+    # via cloud-volume
 middle-auth-client==3.16.1
     # via -r requirements.in
-ml-dtypes==0.3.2
+ml-dtypes==0.5.1
     # via tensorstore
 multiprocess==0.70.15
     # via pathos
@@ -201,24 +197,22 @@ networkx==3.1
     # via
     #   -r requirements.in
     #   cloud-volume
+    #   osteoid
 numpy==1.26.0
     # via
     #   -r requirements.in
     #   cloud-volume
     #   compressed-segmentation
-    #   compresso
-    #   crackle-codec
     #   fastremap
-    #   fpzip
     #   messagingclient
+    #   microviewer
     #   ml-dtypes
     #   multiwrapper
+    #   osteoid
     #   pandas
-    #   pyspng-seunglab
     #   simplejpeg
     #   task-queue
     #   tensorstore
-    #   zfpc
     #   zmesh
 orderedmultidict==1.0.1
     # via furl
@@ -226,6 +220,8 @@ orjson==3.9.7
     # via
     #   cloud-files
     #   task-queue
+osteoid==0.3.1
+    # via cloud-volume
 packaging==23.1
     # via pytest
 pandas==2.1.1
@@ -237,8 +233,6 @@ pathos==0.3.1
     #   task-queue
 pbr==5.11.1
     # via task-queue
-pillow==10.0.1
-    # via cloud-volume
 pluggy==1.3.0
     # via pytest
 posix-ipc==1.1.1
@@ -273,12 +267,8 @@ pyasn1==0.5.0
     #   rsa
 pyasn1-modules==0.3.0
     # via google-auth
-pybind11==2.11.1
-    # via crackle-codec
 pysimdjson==5.0.2
     # via cloud-volume
-pyspng-seunglab==1.1.0
-    # via cloud-volume
 pytest==7.4.2
     # via compressed-segmentation
 python-dateutil==2.8.2
@@ -340,7 +330,7 @@ tenacity==8.2.3
     #   cloud-files
     #   cloud-volume
     #   task-queue
-tensorstore==0.1.53
+tensorstore==0.1.75
     # via -r requirements.in
 tqdm==4.66.1
     # via
@@ -360,10 +350,6 @@ werkzeug==2.3.8
     # via
     #   -r requirements.in
     #   flask
-zfpc==0.1.2
-    # via cloud-volume
-zfpy==1.0.0
-    # via zfpc
 zmesh==1.7.0
     # via -r requirements.in
 zope-event==5.0