Commit 7796aa5

Merge pull request #2 from ChrisFloofyKitsune/fix-unexpected-sharps-on-import

Fix unexpected sharps on import
ChrisFloofyKitsune authored May 26, 2024
2 parents 5e1b050 + 5060491 commit 7796aa5
Showing 4 changed files with 20 additions and 22 deletions.
2 changes: 1 addition & 1 deletion __init__.py
```diff
@@ -3,7 +3,7 @@
     'author': 'ChrisFloofyKitsune, based on s3o code by Beherith and Muon',
     "description": "Tools for working with *.s3o files.",
     'category': 'Import-Export',
-    'version': (0, 2, 4),
+    'version': (0, 2, 5),
     'blender': (4, 1, 0)
 }
```

2 changes: 1 addition & 1 deletion ambient_occlusion.py
```diff
@@ -416,7 +416,7 @@ def ao_adjust(ao_in):
     bm.from_mesh(mesh)
     bm.verts.ensure_lookup_table()

-    corners_to_fix = set(np.flatnonzero(np.isclose(min_ao_data, 0, atol=0.05)))
+    corners_to_fix = set(np.flatnonzero(np.isclose(min_ao_data, 0, atol=0.05, rtol=0)))
    for corner_idx in corners_to_fix:
         vert_idx = mesh.loops[corner_idx].vertex_index
         bm_loop = next(l for l in bm.verts[vert_idx].link_loops if l.index == corner_idx)
```
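The fix pins `rtol=0` so that `np.isclose` applies a purely absolute tolerance: by default it tests `|a - b| <= atol + rtol * |b|` with `rtol=1e-5`, so the effective cutoff grows with the magnitude of `b`. (With `b = 0`, as in this call, the relative term vanishes anyway; pinning it keeps the call consistent with the comparisons below, where it does matter.) A minimal standalone sketch of the default behavior, with illustrative values rather than data from the add-on:

```python
import numpy as np

a = np.array([1000.000, 0.030])
b = np.array([1000.009, 0.000])

# Default rtol=1e-5 silently widens the tolerance for large magnitudes:
# |1000.000 - 1000.009| = 0.009 <= 0.002 + 1e-5 * 1000.009 ~= 0.012 -> True
print(np.isclose(a, b, atol=0.002))          # [ True False]
print(np.isclose(a, b, atol=0.002, rtol=0))  # [False False]
```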
18 changes: 8 additions & 10 deletions s3o_utils.py
```diff
@@ -116,14 +116,11 @@ def make_bl_obj_from_s3o_mesh(
     # store this now so that values are not overlooked as a result of the de-duplication steps
     v_ambient_occlusion: list[float] = [v.ambient_occlusion for v in p_vertices]

-    duplicate_verts = []
-
     if merge_vertices:
         duplicate_verts = util.make_duplicates_mapping(p_vertices, 0.001)

         for i, current_vert_index in enumerate(idx_pair[0] for idx_pair in p_indices):
-            if current_vert_index in duplicate_verts:
-                p_indices[i] = (duplicate_verts[current_vert_index], p_indices[i][1])
+            p_indices[i] = (duplicate_verts[current_vert_index], p_indices[i][1])

     type_face_indices = list[tuple[int, int, int, int]]

@@ -144,19 +141,20 @@ def make_bl_obj_from_s3o_mesh(
     # tex_coords are always considered unique per vertex
     v_tex_coords: dict[int, Vector] = {}

-    for i, vertex in ((i, v) for i, v in enumerate(p_vertices) if i not in duplicate_verts):
-        (v_positions[i], v_normals[i], v_tex_coords[i]) = vertex
+    for i, vertex in enumerate(p_vertices):
+        v_positions[i] = vertex.position
+        v_normals[i] = vertex.normal
+        v_tex_coords[i] = vertex.tex_coords

     if merge_vertices:
         duplicate_positions = util.make_duplicates_mapping(v_positions, 0.002)
-        norms_to_check = {i: v_normals[i] for i in duplicate_positions.keys()}
-        duplicate_normals = util.make_duplicates_mapping(norms_to_check, 0.01)
+        duplicate_normals = util.make_duplicates_mapping(v_normals, 0.01)

         for face_indices in face_indices_list:
             for i, (pos_idx, norm_idx, tex_coord_idx, ao_idx) in enumerate(face_indices):
                 face_indices[i] = (
-                    duplicate_positions[pos_idx] if pos_idx in duplicate_positions else pos_idx,
-                    duplicate_normals[norm_idx] if norm_idx in duplicate_normals else norm_idx,
+                    duplicate_positions[pos_idx],
+                    duplicate_normals[norm_idx],
                     tex_coord_idx,
                     ao_idx
                 )
```
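The simplification works because `make_duplicates_mapping` (rewritten in util.py below) now returns a total mapping: every index maps to its canonical original, with non-duplicates mapping to themselves, so the `in`-membership guards can go. A toy sketch of the contract change, using made-up indices rather than real mesh data:

```python
# Old contract: a partial dict holding only the remapped indices,
# so every lookup needed a membership test with a fallback.
dupes_as_dict = {2: 0}
# New contract: a total mapping in which entry i is the canonical index for i.
dupes_as_array = [0, 1, 0, 3]

indices = [0, 1, 2, 3]
old_style = [dupes_as_dict[i] if i in dupes_as_dict else i for i in indices]
new_style = [dupes_as_array[i] for i in indices]
assert old_style == new_style == [0, 1, 0, 3]
```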
20 changes: 10 additions & 10 deletions util.py
```diff
@@ -56,12 +56,12 @@ def extract_null_terminated_string(data: bytes, offset: int) -> str:
 def make_duplicates_mapping(
         values: dict[int, npt.ArrayLike] | npt.ArrayLike,
         tolerance=0.001,
-) -> dict[int, int]:
+) -> npt.NDArray[int]:
     np_array: npt.NDArray
     try:
         if type(values) is dict:
             if len(values) == 0:
-                return dict()
+                return np.array([], dtype=int)
             example_array = np.array(next(iter(values.values()), ()))
             np_array = np.full_like(
                 example_array, fill_value=np.nan, shape=(max(values.keys()) + 1, *example_array.shape)
@@ -70,24 +70,24 @@
         else:
             np_array = np.array(values)
             if np_array.size == 0:
-                return {}
+                return np.array([], dtype=int)

-        indexes_of_originals = np.arange(len(np_array), dtype=int)
+        idx_to_orig_idx = np.arange(len(np_array), dtype=int)

         for idx in range(len(np_array) - 1):
             current_value = np_array[idx]

             # skip if value is "empty" or if this value was already marked as a duplicate
             if np.all(np.isnan(current_value)):
                 continue
-            if indexes_of_originals[idx] < idx:
+            if idx_to_orig_idx[idx] < idx:
                 continue

-            slice_compare_results = np.isclose(np_array[idx + 1:], current_value, atol=tolerance)
-            slice_compare_results = np.logical_and.reduce(slice_compare_results, (*range(0, np_array.ndim),)[1:])
-            np.copyto(indexes_of_originals[idx + 1:], idx, where=slice_compare_results)
-            result = {idx: orig_idx for idx, orig_idx in enumerate(indexes_of_originals) if idx != orig_idx}
-            return result
+            slice_compare_results = np.isclose(np_array[idx + 1:], current_value, atol=tolerance, rtol=0)
+            # exclude first axis
+            slice_compare_results = np.all(slice_compare_results, axis=tuple(range(1, np_array.ndim)))
+            np.copyto(idx_to_orig_idx[idx + 1:], idx, where=slice_compare_results)
+        return idx_to_orig_idx

     except Exception as err:
         print("WARNING could not find dupes!", err)
```
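A quick usage sketch of the new contract (toy coordinates; assumes this repository's util.py is importable): the result is an index array whose entry `i` holds the index of the first earlier value within `tolerance` of value `i`, with unique values mapping to themselves.

```python
import numpy as np
import util  # this repository's util.py

positions = np.array([
    [0.0, 0.0, 0.0],
    [1.0, 0.0, 0.0],
    [0.0005, 0.0, 0.0],  # within 0.001 of index 0, so it maps back to 0
])

mapping = util.make_duplicates_mapping(positions, tolerance=0.001)
print(mapping)  # expected: [0 1 0]
```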
