11# pylint: disable=invalid-name, missing-docstring, line-too-long, no-member
22
3+ import json
4+ import pickle
5+ import time
36import functools
4- from collections import deque
7+ from collections import defaultdict , deque
58
69import numpy as np
710from cloudvolume import CloudVolume
# Words (uint32) per octree row in the multiscale encoding:
# x, y, z chunk coordinates, then child-row begin and child-row end
# (see build_octree, which writes offsets +0 .. +4 per node).
1720OCTREE_NODE_SIZE = 5
1821
1922
def _morton_sort(cg: "ChunkedGraph", children: np.ndarray) -> np.ndarray:
    """
    Sort IDs by the Z-order (Morton) position of their chunk coordinates.

    Args:
        cg: graph handle providing ``get_chunk_coordinates(id) -> (x, y, z)``.
        children: 1D array of node/chunk IDs; returned as-is when empty.

    Returns:
        The same IDs, Morton-ordered, as an ``np.ndarray`` of dtype NODE_ID.
    """
    if children.size == 0:
        return children

    coords = [cg.get_chunk_coordinates(child) for child in children]

    def _less_msb(x: int, y: int) -> bool:
        # True when y's most significant set bit is higher than x's.
        return x < y and x < (x ^ y)

    def _cmp_zorder(lhs, rhs) -> int:
        # Z-order comparison without explicitly interleaving bits; see
        # https://en.wikipedia.org/wiki/Z-order_curve and
        # https://github.com/google/neuroglancer/issues/272
        # (Original annotated this "-> bool", but it returns a signed
        # difference as required by cmp_to_key.)
        msd = 2
        for dim in (1, 0):
            if _less_msb(lhs[msd] ^ rhs[msd], lhs[dim] ^ rhs[dim]):
                msd = dim
        # Cast to Python int: unsigned numpy coordinates would wrap on
        # subtraction and break the comparator's sign convention.
        return int(lhs[msd]) - int(rhs[msd])

    ordered = sorted(
        zip(children, coords),
        key=functools.cmp_to_key(lambda a, b: _cmp_zorder(a[1], b[1])),
    )
    return np.array([child for child, _ in ordered], dtype=NODE_ID)
53+
54+
# NOTE(review): scraped diff fragment — fused line numbers and "+"/"-" markers
# are extraction artifacts, not code. The hunk header below hides part of the
# while-loop body (the cg.get_children call), so this view is incomplete.
2055def _get_hierarchy (cg : ChunkedGraph , node_id : np .uint64 ) -> dict :
56+ node_chunk_id_map = {node_id : cg .get_chunk_id (node_id )}
2157 children_map = {}
58+ children_chunks_map = {}
59+ chunk_nodes_map = {}
2260 layer = cg .get_chunk_layer (node_id )
2361 if layer < 2 :
24- return children_map
# BUG(review): this early return yields a 3-tuple, but the final return at the
# bottom yields a 4-tuple (adds node_chunk_id_map). Callers that unpack four
# values (see get_manifest) will raise ValueError for layer < 3 inputs.
# The declared "-> dict" return annotation is stale in either case.
62+ return children_map , children_chunks_map , chunk_nodes_map
63+
64+ chunk_nodes_map [cg .get_chunk_id (node_id )] = np .array ([node_id ], dtype = NODE_ID )
2565 if layer == 2 :
2666 children_map [node_id ] = empty_1d .copy ()
27- return children_map
67+ children_chunks_map [node_id ] = empty_1d .copy ()
# BUG(review): same 3-tuple / 4-tuple mismatch as above.
68+ return children_map , children_chunks_map , chunk_nodes_map
2869
2970 node_ids = np .array ([node_id ], dtype = NODE_ID )
3071 while node_ids .size > 0 :
@@ -34,10 +75,19 @@ def _get_hierarchy(cg: ChunkedGraph, node_id: np.uint64) -> dict:
3475 _ids = np .concatenate (list (children .values ())) if children else empty_1d .copy ()
3576 node_layers = cg .get_chunk_layers (_ids )
3677 node_ids = _ids [node_layers > 2 ]
# Record the chunk id of every discovered node for later chunk grouping.
78+ chunk_ids = cg .get_chunk_ids_from_node_ids (_ids )
79+ node_chunk_id_map .update (zip (_ids , chunk_ids ))
3780
3881 for l2id in _ids [node_layers == 2 ]:
3982 children_map [l2id ] = empty_1d .copy ()
40- return children_map
83+
# Post-pass: for each parent node, the unique chunks its children occupy, and
# for each such chunk, the subset of children that live in it.
84+ for k , v in children_map .items ():
85+ chunk_ids = np .array ([node_chunk_id_map [i ] for i in v ], dtype = NODE_ID )
86+ uchunk_ids = np .unique (chunk_ids )
87+ children_chunks_map [k ] = uchunk_ids
88+ for c in uchunk_ids :
# NOTE(review): a chunk shared by children of two different parents gets its
# chunk_nodes_map entry overwritten by the last parent processed — confirm
# whether chunks are guaranteed unique per parent here.
89+ chunk_nodes_map [c ] = v [chunk_ids == c ]
90+ return children_map , children_chunks_map , chunk_nodes_map , node_chunk_id_map
4191
4292
# NOTE(review): only the tail of this function is visible — its parameter list
# and the construction of coords_map / node_ids / node_layers / layer_mask are
# hidden behind the hunk header below.
4393def _get_node_coords_and_layers_map (
@@ -51,6 +101,13 @@ def _get_node_coords_and_layers_map(
51101 coords = cg .get_chunk_coordinates_multiple (node_ids [layer_mask ])
52102 _node_coords = dict (zip (node_ids [layer_mask ], coords ))
53103 coords_map .update (_node_coords )
104+
# New in this revision: also key coords_map by chunk id so callers (e.g.
# build_octree) can look coordinates up per chunk. Nodes sharing a chunk write
# the same entry repeatedly — presumably all carry identical chunk
# coordinates; verify.
105+ chunk_id_coords_map = {}
106+ chunk_ids = cg .get_chunk_ids_from_node_ids (node_ids )
107+ node_chunk_id_map = dict (zip (node_ids , chunk_ids ))
108+ for k , v in coords_map .items ():
109+ chunk_id_coords_map [node_chunk_id_map [k ]] = v
110+ coords_map .update (chunk_id_coords_map )
54111 return coords_map , dict (zip (node_ids , node_layers ))
55112
56113
# NOTE(review): mid-function diff fragment of _insert_skipped_nodes; the
# signature and the rest of the body lie outside this view.
@@ -81,9 +138,9 @@ def _insert_skipped_nodes(
81138 skipped_layer = nl - count
82139 skipped_child = cg .get_chunk_id (layer = skipped_layer , x = x , y = y , z = z )
83140 limit = cg .get_segment_id_limit (skipped_child )
# The change below keeps both operands as uint64 (`limit - np.uint64(1)`)
# instead of `np.uint64(limit - 1)` — presumably to avoid NumPy's mixed
# int/uint64 promotion; confirm against the NumPy (NEP 50) promotion rules.
84- skipped_child += np . uint64 ( limit - 1 )
141+ skipped_child += limit - np . uint64 ( 1 )
85142 while skipped_child in new_children_map :
# Probe downward until an unused synthetic id is found; same uint64-safe
# subtraction rewrite as above.
86- skipped_child = np . uint64 ( skipped_child - 1 )
143+ skipped_child = skipped_child - np . uint64 ( 1 )
87144
88145 skipped_hierarchy .append (skipped_child )
89146 coords_map [skipped_child ] = np .array ((x , y , z ), dtype = int )
@@ -99,38 +156,6 @@
99156 return new_children_map , coords_map , layers_map
100157
101158
# NOTE(review): this entire function is DELETED in this commit (every line
# carries the "-" diff marker). It is superseded by _morton_sort above, which
# is identical except that it wraps the sorted result in
# np.array(..., dtype=NODE_ID) instead of returning a tuple.
102- def _sort_octree_row (cg : ChunkedGraph , children : np .ndarray ):
103- """
104- Sort children by their morton code.
105- """
106- if children .size == 0 :
107- return children
108- children_coords = []
109-
110- for child in children :
111- children_coords .append (cg .get_chunk_coordinates (child ))
112-
113- def cmp_zorder (lhs , rhs ) -> bool :
114- # https://en.wikipedia.org/wiki/Z-order_curve
115- # https://github.com/google/neuroglancer/issues/272
116- def less_msb (x : int , y : int ) -> bool :
117- return x < y and x < (x ^ y )
118-
119- msd = 2
120- for dim in [1 , 0 ]:
121- if less_msb (lhs [msd ] ^ rhs [msd ], lhs [dim ] ^ rhs [dim ]):
122- msd = dim
123- return lhs [msd ] - rhs [msd ]
124-
125- children , _ = zip (
126- * sorted (
127- zip (children , children_coords ),
128- key = functools .cmp_to_key (lambda x , y : cmp_zorder (x [1 ], y [1 ])),
129- )
130- )
131- return children
132-
133-
# NOTE(review): only the first two statements of _validate_octree are visible;
# the remainder of its body (including the nested _explore_node referenced by
# the hunk header) lies outside this view.
134159def _validate_octree (octree : np .ndarray , octree_node_ids : np .ndarray ):
# Each octree node occupies exactly 5 uint32 words (OCTREE_NODE_SIZE).
135160 assert octree .size % 5 == 0 , "Invalid octree size."
136161 num_nodes = octree .size // 5
@@ -174,7 +199,13 @@ def _explore_node(node: int):
174199
175200
def build_octree(
    cg: "ChunkedGraph",
    node_id: np.uint64,
    children_map: dict,
    children_chunks_map: dict,
    chunk_nodes_map: dict,
    node_chunk_id_map: dict,
    mesh_fragments: dict,
):
    """
    Build the flat octree used by neuroglancer's multiscale mesh manifest.

    Chunks are visited breadth-first from the chunk containing ``node_id``
    and written back-to-front, so coarser levels occupy higher rows. Each
    row is OCTREE_NODE_SIZE (5) uint32 values: x, y, z chunk coordinates,
    then the [begin, end) row range of the node's children. Bit 31 of the
    "end" word flags a row with no mesh fragments as empty.

    Args:
        cg: chunked graph handle (coordinate lookups).
        node_id: root node whose hierarchy is encoded.
        children_map: node id -> child node ids (from _get_hierarchy).
        children_chunks_map: node id -> chunk ids of its children.
        chunk_nodes_map: chunk id -> node ids contained in that chunk.
        node_chunk_id_map: node id -> its chunk id.
        mesh_fragments: node id -> mesh fragment reference(s).

    Returns:
        Tuple ``(octree, octree_node_ids, fragments)`` where ``fragments[i]``
        lists the normalized fragments of the chunk written at octree row i.
    """
    node_q = deque()
    node_q.append(node_chunk_id_map[node_id])
    # Only the coordinates are needed here (keyed by chunk id as well);
    # the layers map is discarded.
    coords_map, _ = _get_node_coords_and_layers_map(cg, children_map)

    ROW_TOTAL = len(chunk_nodes_map)
    row_counter = len(chunk_nodes_map)
    octree_size = OCTREE_NODE_SIZE * ROW_TOTAL
    octree = np.zeros(octree_size, dtype=np.uint32)

    octree_node_ids = ROW_TOTAL * [0]
    octree_fragments = defaultdict(list)
    rows_used = 1

    while len(node_q) > 0:
        row_counter -= 1
        current_chunk = node_q.popleft()
        chunk_nodes = chunk_nodes_map[current_chunk]

        # Gather mesh fragments of every node living in this chunk.
        frags = [mesh_fragments[k] for k in chunk_nodes if k in mesh_fragments]
        octree_fragments[int(current_chunk)].extend(normalize_fragments(frags))

        # Union of child chunks across all nodes of this chunk, Morton-sorted
        # so sibling rows follow the expected spatial order.
        # NOTE(review): assumes each chunk is enqueued exactly once; a chunk
        # parented from two different chunks would be visited twice and
        # underflow row_counter — confirm against _get_hierarchy's output.
        children_chunks = set()
        for k in chunk_nodes:
            children_chunks.update(children_chunks_map[k])
        children_chunks = np.array(list(children_chunks), dtype=NODE_ID)
        children_chunks = _morton_sort(cg, children_chunks)
        node_q.extend(children_chunks)

        octree_node_ids[row_counter] = current_chunk

        offset = OCTREE_NODE_SIZE * row_counter
        x, y, z = coords_map[current_chunk]
        octree[offset + 0] = x
        octree[offset + 1] = y
        octree[offset + 2] = z

        # Children occupy the next free rows counted from the back.
        rows_used += children_chunks.size
        start = ROW_TOTAL - rows_used
        end_empty = start + children_chunks.size

        octree[offset + 3] = start
        octree[offset + 4] = end_empty
        if len(octree_fragments[int(current_chunk)]) == 0:
            # No mesh for this chunk: mark the row empty (bit 31 of "end").
            octree[offset + 4] |= 1 << 31

    # NOTE(review): _validate_octree was disabled in this revision; re-enable
    # once the chunk-based traversal is trusted.
    fragments = [octree_fragments[int(node)] for node in octree_node_ids]
    return octree, octree_node_ids, fragments
240279
241280
# NOTE(review): scraped diff fragment; the hunk header below hides the
# CloudVolume construction between ManifestCache and "progress = False".
2055# (see markers: "+" added lines, "-" removed lines, fused line numbers.)
242281def get_manifest (cg : ChunkedGraph , node_id : np .uint64 ) -> dict :
# NOTE(review): leftover local-disk debug caching ("dist/multiscale/...") and
# timing prints in what looks like a service entry point — likely temporary.
282+ start = time .time ()
283+ fname = f"dist/multiscale/{ node_id } _children_map.bin"
284+ try :
285+ with open (fname , "rb" ) as f :
286+ children_map , children_chunks_map , chunk_nodes_map , node_chunk_id_map = (
287+ pickle .load (f )
288+ )
289+ print ("got children_map from pickle" )
# BUG(review): pickle.dump below writes a 3-tuple, but the load above unpacks
# FOUR names — warm runs raise ValueError, which the bare "except:" silently
# swallows, so the cache can never be hit and is rewritten every call. The
# bare "except:" also catches KeyboardInterrupt/SystemExit; narrow it.
# Security: pickle.load on an on-disk file is only safe if the file is
# trusted — presumably dev-only; verify before shipping.
290+ except :
291+ children_map , children_chunks_map , chunk_nodes_map , node_chunk_id_map = (
292+ _get_hierarchy (cg , node_id )
293+ )
294+ with open (fname , "wb" ) as f :
295+ pickle .dump ((children_map , children_chunks_map , chunk_nodes_map ), f )
296+ print ("wrote children_map to pickle" )
297+
244298 node_ids = np .fromiter (children_map .keys (), dtype = NODE_ID )
245299 manifest_cache = ManifestCache (cg .graph_id , initial = True )
246300
@@ -251,25 +305,45 @@ def get_manifest(cg: ChunkedGraph, node_id: np.uint64) -> dict:
251305 progress = False ,
252306 )
253307
254- fragments_d , _not_cached , _ = manifest_cache .get_fragments (node_ids )
255- initial_meshes = cv .mesh .initial_exists (_not_cached , return_byte_range = True )
256- _fragments_d , _ = del_none_keys (initial_meshes )
257- manifest_cache .set_fragments (_fragments_d )
258- fragments_d .update (_fragments_d )
# Same debug-cache pattern as above (JSON this time); same bare "except:"
# concern. JSON keys are strings, hence the np.uint64(k)/int(k) round-trip.
308+ fname = f"dist/multiscale/{ node_id } _fragments.json"
309+ try :
310+ with open (fname , "r" ) as f :
311+ _fragments_d = json .load (f )
312+ fragments_d = {np .uint64 (k ): v for k , v in _fragments_d .items ()}
313+ print ("got fragments_d from json" )
314+ except :
315+ fragments_d , _not_cached , _ = manifest_cache .get_fragments (node_ids )
316+ initial_meshes = cv .mesh .initial_exists (_not_cached , return_byte_range = True )
317+ _fragments_d , _ = del_none_keys (initial_meshes )
318+ manifest_cache .set_fragments (_fragments_d )
319+ fragments_d .update (_fragments_d )
320+ with open (fname , "w" ) as f :
321+ json .dump ({int (k ): v for k , v in fragments_d .items ()}, f )
322+ print ("wrote fragments_d to json" )
323+
324+ print (cg .graph_id , node_id , len (children_map ), len (fragments_d ))
# build_octree now takes the chunk-level maps produced by _get_hierarchy.
325+ octree , node_ids , fragments = build_octree (
326+ cg ,
327+ node_id ,
328+ children_map ,
329+ children_chunks_map ,
330+ chunk_nodes_map ,
331+ node_chunk_id_map ,
332+ fragments_d ,
333+ )
259334
260- octree , node_ids , fragments = build_octree (cg , node_id , children_map , fragments_d )
261335 max_layer = min (cg .get_chunk_layer (node_id ) + 1 , cg .meta .layer_count )
262-
263336 chunk_shape = np .array (cg .meta .graph_config .CHUNK_SIZE , dtype = np .dtype ("<f4" ))
264337 chunk_shape *= cg .meta .resolution
265338 clip_bounds = cg .meta .voxel_bounds .T * cg .meta .resolution
266339 response = {
267340 "chunkShape" : chunk_shape ,
268341 "chunkGridSpatialOrigin" : np .array ([0 , 0 , 0 ], dtype = np .dtype ("<f4" )),
269342 "lodScales" : np .arange (2 , max_layer , dtype = np .dtype ("<f4" )) * 1 ,
# Fragments are now normalized inside build_octree, hence the "-" line.
270- "fragments" : normalize_fragments ( fragments ) ,
343+ "fragments" : fragments ,
271344 "octree" : octree ,
272345 "clipLowerBound" : np .array (clip_bounds [0 ], dtype = np .dtype ("<f4" )),
273346 "clipUpperBound" : np .array (clip_bounds [1 ], dtype = np .dtype ("<f4" )),
274347 }
348+ print ("time" , time .time () - start )
275349 return node_ids , response
0 commit comments