4 changes: 2 additions & 2 deletions pychunkedgraph/app/segmentation/common.py
@@ -791,8 +791,8 @@ def handle_subgraph(table_id, root_id, only_internal_edges=True):
     supervoxels = np.concatenate(
         [agg.supervoxels for agg in l2id_agglomeration_d.values()]
     )
-    mask0 = np.in1d(edges.node_ids1, supervoxels)
-    mask1 = np.in1d(edges.node_ids2, supervoxels)
+    mask0 = np.isin(edges.node_ids1, supervoxels)
+    mask1 = np.isin(edges.node_ids2, supervoxels)
     edges = edges[mask0 & mask1]
 
     return edges
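Note on the recurring change in this PR: `np.in1d` is deprecated in favor of `np.isin`, which performs the same element-wise membership test under its modern name. A minimal sketch of the masking pattern above, with made-up IDs standing in for `edges.node_ids1`/`edges.node_ids2`:

```python
import numpy as np

# Hypothetical endpoint arrays; in the PR these come from the edges object.
node_ids1 = np.array([10, 11, 12, 13], dtype=np.uint64)
node_ids2 = np.array([11, 12, 99, 10], dtype=np.uint64)
supervoxels = np.array([10, 11, 12], dtype=np.uint64)

# np.isin: element-wise membership, same semantics as the removed np.in1d.
mask0 = np.isin(node_ids1, supervoxels)
mask1 = np.isin(node_ids2, supervoxels)

# Keep only edges whose endpoints both lie inside the supervoxel set.
keep = mask0 & mask1
print(node_ids1[keep], node_ids2[keep])  # [10 11] [11 12]
```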
12 changes: 6 additions & 6 deletions pychunkedgraph/graph/cache.py
@@ -79,10 +79,10 @@ def cross_edges_decorated(node_id):
         return cross_edges_decorated(node_id)
 
     def parents_multiple(self, node_ids: np.ndarray, *, time_stamp: datetime = None):
-        node_ids = np.array(node_ids, dtype=NODE_ID, copy=False)
+        node_ids = np.asarray(node_ids, dtype=NODE_ID)
         if not node_ids.size:
             return node_ids
-        mask = np.in1d(node_ids, np.fromiter(self.parents_cache.keys(), dtype=NODE_ID))
+        mask = np.isin(node_ids, np.fromiter(self.parents_cache.keys(), dtype=NODE_ID))
         parents = node_ids.copy()
         parents[mask] = self._parent_vec(node_ids[mask])
         parents[~mask] = self._cg.get_parents(
@@ -93,10 +93,10 @@ def parents_multiple(self, node_ids: np.ndarray, *, time_stamp: datetime = None)

     def children_multiple(self, node_ids: np.ndarray, *, flatten=False):
         result = {}
-        node_ids = np.array(node_ids, dtype=NODE_ID, copy=False)
+        node_ids = np.asarray(node_ids, dtype=NODE_ID)
         if not node_ids.size:
             return result
-        mask = np.in1d(node_ids, np.fromiter(self.children_cache.keys(), dtype=NODE_ID))
+        mask = np.isin(node_ids, np.fromiter(self.children_cache.keys(), dtype=NODE_ID))
         cached_children_ = self._children_vec(node_ids[mask])
         result.update({id_: c_ for id_, c_ in zip(node_ids[mask], cached_children_)})
         result.update(self._cg.get_children(node_ids[~mask], raw_only=True))
@@ -111,10 +111,10 @@ def cross_chunk_edges_multiple(
         self, node_ids: np.ndarray, *, time_stamp: datetime = None
     ):
         result = {}
-        node_ids = np.array(node_ids, dtype=NODE_ID, copy=False)
+        node_ids = np.asarray(node_ids, dtype=NODE_ID)
         if not node_ids.size:
             return result
-        mask = np.in1d(
+        mask = np.isin(
             node_ids, np.fromiter(self.cross_chunk_edges_cache.keys(), dtype=NODE_ID)
         )
         cached_edges_ = self._cross_chunk_edges_vec(node_ids[mask])
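Note: the `np.array(..., copy=False)` → `np.asarray(...)` swaps are not cosmetic. Since NumPy 2.0, `copy=False` means "never copy" and raises `ValueError` whenever a copy is unavoidable (e.g. converting a Python list, or a dtype change), while `np.asarray` keeps the old copy-only-if-needed behavior. A quick sketch, assuming `NODE_ID` is `np.uint64` as in `basetypes`:

```python
import numpy as np

NODE_ID = np.uint64  # assumption: mirrors basetypes.NODE_ID

ids = [1, 2, 3]  # a Python list can only become an ndarray via a copy

# NumPy >= 2.0: copy=False now means "never copy", so this raises.
try:
    np.array(ids, dtype=NODE_ID, copy=False)
except ValueError as err:
    print("np.array(copy=False) raised:", err)

# np.asarray copies only when it must, on old and new NumPy alike.
arr = np.asarray(ids, dtype=NODE_ID)
print(arr, arr.dtype)  # [1 2 3] uint64
```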
9 changes: 5 additions & 4 deletions pychunkedgraph/graph/chunkedgraph.py
@@ -1,5 +1,4 @@
 # pylint: disable=invalid-name, missing-docstring, too-many-lines, import-outside-toplevel, unsupported-binary-operation
-
 import time
 import typing
 import datetime
@@ -695,8 +694,8 @@ def get_l2_agglomerations(
             else:
                 all_chunk_edges = all_chunk_edges.get_pairs()
             supervoxels = self.get_children(level2_ids, flatten=True)
-            mask0 = np.in1d(all_chunk_edges[:, 0], supervoxels)
-            mask1 = np.in1d(all_chunk_edges[:, 1], supervoxels)
+            mask0 = np.isin(all_chunk_edges[:, 0], supervoxels)
+            mask1 = np.isin(all_chunk_edges[:, 1], supervoxels)
             return all_chunk_edges[mask0 & mask1]
 
         l2id_children_d = self.get_children(level2_ids)
@@ -765,6 +764,7 @@ def add_edges(
         source_coords: typing.Sequence[int] = None,
         sink_coords: typing.Sequence[int] = None,
         allow_same_segment_merge: typing.Optional[bool] = False,
+        stitch_mode: typing.Optional[bool] = False,
     ) -> operation.GraphEditOperation.Result:
         """
         Adds an edge to the chunkedgraph
@@ -781,6 +781,7 @@
             source_coords=source_coords,
             sink_coords=sink_coords,
             allow_same_segment_merge=allow_same_segment_merge,
+            stitch_mode=stitch_mode,
         ).execute()
 
     def remove_edges(
@@ -911,7 +912,7 @@ def get_chunk_coordinates_multiple(self, node_or_chunk_ids: typing.Sequence):
             node_or_chunk_ids, dtype=basetypes.NODE_ID, copy=False
         )
         layers = self.get_chunk_layers(node_or_chunk_ids)
-        assert np.all(layers == layers[0]), "All IDs must have the same layer."
+        assert len(layers) == 0 or np.all(layers == layers[0]), "All IDs must have the same layer."
         return chunk_utils.get_chunk_coordinates_multiple(self.meta, node_or_chunk_ids)
 
     def get_chunk_id(
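The guarded assert in `get_chunk_coordinates_multiple` fixes an empty-input edge case rather than style: `layers[0]` raises `IndexError` on a zero-length array before `np.all` ever runs, so the length check has to short-circuit first. Illustration:

```python
import numpy as np

layers = np.array([], dtype=np.uint64)

# Old form: fails on empty input because layers[0] is out of bounds.
try:
    assert np.all(layers == layers[0]), "All IDs must have the same layer."
except IndexError as err:
    print("unguarded assert raised:", err)

# New form: short-circuits on empty input, still validates otherwise.
assert len(layers) == 0 or np.all(layers == layers[0]), "All IDs must have the same layer."
```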
4 changes: 2 additions & 2 deletions pychunkedgraph/graph/chunks/utils.py
@@ -98,7 +98,7 @@ def get_chunk_coordinates_multiple(meta, ids: np.ndarray) -> np.ndarray:
     y_offset = x_offset - bits_per_dim
     z_offset = y_offset - bits_per_dim
 
-    ids = np.array(ids, dtype=int, copy=False)
+    ids = np.asarray(ids, dtype=int)
     X = ids >> x_offset & 2**bits_per_dim - 1
     Y = ids >> y_offset & 2**bits_per_dim - 1
     Z = ids >> z_offset & 2**bits_per_dim - 1
@@ -152,7 +152,7 @@ def get_chunk_ids_from_node_ids(meta, ids: Iterable[np.uint64]) -> np.ndarray:
     bits_per_dims = np.array([meta.bitmasks[l] for l in get_chunk_layers(meta, ids)])
     offsets = 64 - meta.graph_config.LAYER_ID_BITS - 3 * bits_per_dims
 
-    ids = np.array(ids, dtype=int, copy=False)
+    ids = np.asarray(ids, dtype=int)
     cids1 = np.array((ids >> offsets) << offsets, dtype=np.uint64)
     # cids2 = np.vectorize(get_chunk_id)(meta, ids)
     # assert np.all(cids1 == cids2)
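For context on these two hunks: chunk IDs are bit-packed 64-bit integers, and both functions recover coordinates by shifting and masking fixed-width fields. A toy round trip under assumed field widths (the real widths come from `meta.bitmasks` and `meta.graph_config.LAYER_ID_BITS`):

```python
import numpy as np

# Assumed layout: 8 layer bits at the top, then 10 bits per coordinate.
LAYER_ID_BITS, bits_per_dim = 8, 10
x_offset = 64 - LAYER_ID_BITS - bits_per_dim
y_offset = x_offset - bits_per_dim
z_offset = y_offset - bits_per_dim
mask = 2**bits_per_dim - 1

# Pack (layer, X, Y, Z) into one ID, then unpack it the way the diff does.
layer, X, Y, Z = 2, 5, 7, 9
cid = (layer << (64 - LAYER_ID_BITS)) | (X << x_offset) | (Y << y_offset) | (Z << z_offset)
ids = np.asarray([cid], dtype=int)
print(ids >> x_offset & mask, ids >> y_offset & mask, ids >> z_offset & mask)
# [5] [7] [9]
```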
44 changes: 22 additions & 22 deletions pychunkedgraph/graph/cutting.py
@@ -62,7 +62,7 @@ def merge_cross_chunk_edges_graph_tool(
     if len(mapping) > 0:
         mapping = np.concatenate(mapping)
     u_nodes = np.unique(edges)
-    u_unmapped_nodes = u_nodes[~np.in1d(u_nodes, mapping)]
+    u_unmapped_nodes = u_nodes[~np.isin(u_nodes, mapping)]
     unmapped_mapping = np.concatenate(
         [u_unmapped_nodes.reshape(-1, 1), u_unmapped_nodes.reshape(-1, 1)], axis=1
     )
@@ -189,9 +189,9 @@ def _build_gt_graph(self, edges, affs):
         ) = flatgraph.build_gt_graph(comb_edges, comb_affs, make_directed=True)
 
         self.source_graph_ids = np.where(
-            np.in1d(self.unique_supervoxel_ids, self.sources)
+            np.isin(self.unique_supervoxel_ids, self.sources)
         )[0]
-        self.sink_graph_ids = np.where(np.in1d(self.unique_supervoxel_ids, self.sinks))[
+        self.sink_graph_ids = np.where(np.isin(self.unique_supervoxel_ids, self.sinks))[
             0
         ]

@@ -398,7 +398,7 @@ def _remap_cut_edge_set(self, cut_edge_set):
         remapped_cutset_flattened_view = remapped_cutset.view(dtype="u8,u8")
         edges_flattened_view = self.cg_edges.view(dtype="u8,u8")
 
-        cutset_mask = np.in1d(remapped_cutset_flattened_view, edges_flattened_view)
+        cutset_mask = np.isin(remapped_cutset_flattened_view, edges_flattened_view)
 
         return remapped_cutset[cutset_mask]

@@ -432,8 +432,8 @@ def _get_split_preview_connected_components(self, cut_edge_set):
         max_sinks = 0
         i = 0
         for cc in ccs_test_post_cut:
-            num_sources = np.count_nonzero(np.in1d(self.source_graph_ids, cc))
-            num_sinks = np.count_nonzero(np.in1d(self.sink_graph_ids, cc))
+            num_sources = np.count_nonzero(np.isin(self.source_graph_ids, cc))
+            num_sinks = np.count_nonzero(np.isin(self.sink_graph_ids, cc))
             if num_sources > max_sources:
                 max_sources = num_sources
                 max_source_index = i
@@ -486,8 +486,8 @@ def _filter_graph_connected_components(self):
             # If connected component contains no sources or no sinks,
             # remove its nodes from the mincut computation
             if not (
-                np.any(np.in1d(self.source_graph_ids, cc))
-                and np.any(np.in1d(self.sink_graph_ids, cc))
+                np.any(np.isin(self.source_graph_ids, cc))
+                and np.any(np.isin(self.sink_graph_ids, cc))
             ):
                 for node_id in cc:
                     removed[node_id] = True
@@ -525,13 +525,13 @@ def _gt_mincut_sanity_check(self, partition):
                 np.array(np.where(partition.a == i_cc)[0], dtype=int)
             ]
 
-            if np.any(np.in1d(self.sources, cc_list)):
-                assert np.all(np.in1d(self.sources, cc_list))
-                assert ~np.any(np.in1d(self.sinks, cc_list))
+            if np.any(np.isin(self.sources, cc_list)):
+                assert np.all(np.isin(self.sources, cc_list))
+                assert ~np.any(np.isin(self.sinks, cc_list))
 
-            if np.any(np.in1d(self.sinks, cc_list)):
-                assert np.all(np.in1d(self.sinks, cc_list))
-                assert ~np.any(np.in1d(self.sources, cc_list))
+            if np.any(np.isin(self.sinks, cc_list)):
+                assert np.all(np.isin(self.sinks, cc_list))
+                assert ~np.any(np.isin(self.sources, cc_list))
 
     def _sink_and_source_connectivity_sanity_check(self, cut_edge_set):
         """
@@ -555,19 +555,19 @@ def _sink_and_source_connectivity_sanity_check(self, cut_edge_set):
         illegal_split = False
         try:
             for cc in ccs_test_post_cut:
-                if np.any(np.in1d(self.source_graph_ids, cc)):
-                    assert np.all(np.in1d(self.source_graph_ids, cc))
-                    assert ~np.any(np.in1d(self.sink_graph_ids, cc))
+                if np.any(np.isin(self.source_graph_ids, cc)):
+                    assert np.all(np.isin(self.source_graph_ids, cc))
+                    assert ~np.any(np.isin(self.sink_graph_ids, cc))
                 if (
                     len(self.source_path_vertices) == len(cc)
                     and self.disallow_isolating_cut
                 ):
                     if not self.partition_edges_within_label(cc):
                         raise IsolatingCutException("Source")
 
-                if np.any(np.in1d(self.sink_graph_ids, cc)):
-                    assert np.all(np.in1d(self.sink_graph_ids, cc))
-                    assert ~np.any(np.in1d(self.source_graph_ids, cc))
+                if np.any(np.isin(self.sink_graph_ids, cc)):
+                    assert np.all(np.isin(self.sink_graph_ids, cc))
+                    assert ~np.any(np.isin(self.source_graph_ids, cc))
                 if (
                     len(self.sink_path_vertices) == len(cc)
                     and self.disallow_isolating_cut
@@ -664,8 +664,8 @@ def run_split_preview(
     supervoxels = np.concatenate(
         [agg.supervoxels for agg in l2id_agglomeration_d.values()]
     )
-    mask0 = np.in1d(edges.node_ids1, supervoxels)
-    mask1 = np.in1d(edges.node_ids2, supervoxels)
+    mask0 = np.isin(edges.node_ids1, supervoxels)
+    mask1 = np.isin(edges.node_ids2, supervoxels)
     edges = edges[mask0 & mask1]
     edges_to_remove, illegal_split = run_multicut(
         edges,
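One detail worth flagging in `_remap_cut_edge_set`: the membership test runs on `view(dtype="u8,u8")`, which reinterprets each contiguous `(u, v)` uint64 pair as a single structured record, so the test matches whole edges rather than individual endpoints. Unlike `np.in1d`, `np.isin` preserves the shape of its first argument, so a 2-D view needs an explicit `ravel()` to yield a flat row mask. A standalone sketch of the trick:

```python
import numpy as np

cutset = np.array([[1, 2], [3, 4]], dtype=np.uint64)
cg_edges = np.array([[1, 2], [5, 6]], dtype=np.uint64)

# View each (u, v) row as one structured record; rows now compare atomically.
cutset_view = cutset.view(dtype="u8,u8")   # shape (2, 1)
edges_view = cg_edges.view(dtype="u8,u8")

# np.isin keeps the (2, 1) shape of its first argument; flatten for masking.
mask = np.isin(cutset_view, edges_view).ravel()
print(cutset[mask])  # [[1 2]] -- only the edge also present in cg_edges
```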
10 changes: 4 additions & 6 deletions pychunkedgraph/graph/edges/__init__.py
@@ -54,22 +54,20 @@ def __init__(
         affinities: Optional[np.ndarray] = None,
         areas: Optional[np.ndarray] = None,
     ):
-        self.node_ids1 = np.array(node_ids1, dtype=basetypes.NODE_ID, copy=False)
-        self.node_ids2 = np.array(node_ids2, dtype=basetypes.NODE_ID, copy=False)
+        self.node_ids1 = np.array(node_ids1, dtype=basetypes.NODE_ID)
+        self.node_ids2 = np.array(node_ids2, dtype=basetypes.NODE_ID)
         assert self.node_ids1.size == self.node_ids2.size
 
         self._as_pairs = None
 
         if affinities is not None and len(affinities) > 0:
-            self._affinities = np.array(
-                affinities, dtype=basetypes.EDGE_AFFINITY, copy=False
-            )
+            self._affinities = np.array(affinities, dtype=basetypes.EDGE_AFFINITY)
             assert self.node_ids1.size == self._affinities.size
         else:
             self._affinities = np.full(len(self.node_ids1), DEFAULT_AFFINITY)
 
         if areas is not None and len(areas) > 0:
-            self._areas = np.array(areas, dtype=basetypes.EDGE_AREA, copy=False)
+            self._areas = np.array(areas, dtype=basetypes.EDGE_AREA)
             assert self.node_ids1.size == self._areas.size
         else:
             self._areas = np.full(len(self.node_ids1), DEFAULT_AREA)
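Note the different treatment here compared with the `np.asarray` call sites: these constructor assignments simply drop `copy=False`, falling back to `np.array`'s default of always copying. A plausible reading is that this constructor should own its arrays rather than alias caller-provided buffers, at the cost of one copy even when the input already has the right dtype.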
4 changes: 3 additions & 1 deletion pychunkedgraph/graph/edges/utils.py
@@ -69,7 +69,9 @@ def merge_cross_edge_dicts(x_edges_d1: Dict, x_edges_d2: Dict) -> Dict:
     Combines two cross chunk dictionaries of form
     {node_id: {layer id : edge list}}.
     """
-    node_ids = np.unique(list(x_edges_d1.keys()) + list(x_edges_d2.keys()))
+    node_ids = np.unique(
+        np.array(list(x_edges_d1.keys()) + list(x_edges_d2.keys()), dtype=basetypes.NODE_ID)
+    )
     result_d = {}
     for node_id in node_ids:
         cross_edge_ds = [x_edges_d1.get(node_id, {}), x_edges_d2.get(node_id, {})]
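The explicit `np.array(..., dtype=basetypes.NODE_ID)` before `np.unique` pins the dtype instead of letting NumPy infer it from a Python list of dict keys. Inference goes wrong in edge cases: two empty dicts produce a `float64` result, and very large Python-int IDs may fail to convert or land in an unintended dtype. A sketch, again assuming `np.uint64` for `basetypes.NODE_ID`:

```python
import numpy as np

NODE_ID = np.uint64  # assumption: mirrors basetypes.NODE_ID

d1, d2 = {}, {}  # two cross-edge dicts that happen to be empty
keys = list(d1.keys()) + list(d2.keys())

print(np.unique(keys).dtype)                           # float64 (inferred)
print(np.unique(np.array(keys, dtype=NODE_ID)).dtype)  # uint64 (pinned)
```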