Support 3MF component path #2296

Merged · 4 commits · Oct 14, 2024
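A rough usage sketch of what this change enables (the file name and archive layout are hypothetical): a 3MF archive whose root model pulls component geometry from a second model file inside the same zip, via the `path` attribute on `<component>`, should now load with all referenced meshes present in the returned scene.

import trimesh

# "assembly.3mf" is a hypothetical archive whose components reference
# geometry stored in a separate model file within the same zip
scene = trimesh.load("assembly.3mf")
print(scene.geometry.keys())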
5 changes: 4 additions & 1 deletion tests/test_3mf.py
@@ -60,7 +60,10 @@ def test_roundtrip(self):
file_type="3mf",
)

assert set(s.geometry.keys()) == set(r.geometry.keys())
assert set(s.geometry.keys()) == set(r.geometry.keys()), (
s.geometry.keys(),
r.geometry.keys(),
)
assert g.np.allclose(s.bounds, r.bounds)
assert g.np.isclose(s.area, r.area, rtol=1e-3)

105 changes: 67 additions & 38 deletions trimesh/exchange/threemf.py
@@ -1,12 +1,35 @@
import collections
import io
import uuid
import zipfile
from collections import defaultdict

import numpy as np

from .. import graph, util
from ..constants import log
from ..util import unique_name


def _read_mesh(mesh):
vertices = mesh.find("{*}vertices")
v_array = np.array(
[
[i.attrib["x"], i.attrib["y"], i.attrib["z"]]
for i in vertices.iter("{*}vertex")
],
dtype=np.float64,
)

faces = mesh.find("{*}triangles")
f_array = np.array(
[
[i.attrib["v1"], i.attrib["v2"], i.attrib["v3"]]
for i in faces.iter("{*}triangle")
],
dtype=np.int64,
)

return v_array, f_array
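# Illustration only, not part of this diff: a minimal fragment of the kind
# `_read_mesh` consumes. The namespace URI is written from memory of the 3MF
# core spec, though the `{*}` wildcards above make the exact URI irrelevant;
# assumes lxml (which the loader prefers) is available.
from lxml import etree as _etree

_fragment = b"""<mesh xmlns="http://schemas.microsoft.com/3dmanufacturing/core/2015/02">
  <vertices>
    <vertex x="0" y="0" z="0"/>
    <vertex x="1" y="0" z="0"/>
    <vertex x="0" y="1" z="0"/>
  </vertices>
  <triangles>
    <triangle v1="0" v2="1" v3="2"/>
  </triangles>
</mesh>"""
_v, _f = _read_mesh(_etree.fromstring(_fragment))
assert _v.shape == (3, 3) and _f.shape == (1, 3)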


def load_3MF(file_obj, postprocess=True, **kwargs):
@@ -23,6 +46,7 @@ def load_3MF(file_obj, postprocess=True, **kwargs):
kwargs : dict
Constructor arguments for `trimesh.Scene`
"""

# dict, {name in archive: BytesIo}
archive = util.decompress(file_obj, file_type="zip")
# get model with case-insensitive keys
@@ -40,66 +64,74 @@ def load_3MF(file_obj, postprocess=True, **kwargs):
# { mesh id : mesh name}
id_name = {}
# { mesh id: (n,3) float vertices}
v_seq = {}
v_seq = defaultdict(list)
# { mesh id: (n,3) int faces}
f_seq = {}
f_seq = defaultdict(list)
# components are objects that contain other objects
# {id : [other ids]}
components = collections.defaultdict(list)
components = defaultdict(list)
# load information about the scene graph
# each instance is a single geometry
build_items = []

# keep track of names we can use
consumed_counts = {}
consumed_names = set()

# iterate the XML object and build elements with an LXML iterator
# loaded elements are cleared to avoid ballooning memory
model.seek(0)
for _, obj in etree.iterparse(model, tag=("{*}object", "{*}build")):
for _, obj in etree.iterparse(model, tag=("{*}object", "{*}build"), events=("end",)):
# parse objects
if "object" in obj.tag:
# id is mandatory
index = obj.attrib["id"]

# start with stored name
name = obj.attrib.get("name", str(index))
# apparently some exporters name multiple meshes
# the same thing so check to see if it's been used
if name in consumed_names:
name = name + str(index)
name = unique_name(
obj.attrib.get("name", str(index)), consumed_names, consumed_counts
)
consumed_names.add(name)
# store name reference on the index
id_name[index] = name
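# Hedged illustration of the de-duplication above, assuming `unique_name`
# returns the candidate unchanged when it is not already in `consumed_names`
# and otherwise derives a suffixed variant (the exact suffix scheme is an
# implementation detail of `trimesh.util.unique_name`):
#   unique_name("part", set())       -> "part"
#   unique_name("part", {"part"})    -> some new name not in the set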

# if the object has actual geometry data parse here
for mesh in obj.iter("{*}mesh"):
vertices = mesh.find("{*}vertices")
v_seq[index] = np.array(
[
[i.attrib["x"], i.attrib["y"], i.attrib["z"]]
for i in vertices.iter("{*}vertex")
],
dtype=np.float64,
)
vertices.clear()
vertices.getparent().remove(vertices)

faces = mesh.find("{*}triangles")
f_seq[index] = np.array(
[
[i.attrib["v1"], i.attrib["v2"], i.attrib["v3"]]
for i in faces.iter("{*}triangle")
],
dtype=np.int64,
)
faces.clear()
faces.getparent().remove(faces)
v, f = _read_mesh(mesh)
v_seq[index].append(v)
f_seq[index].append(f)

# components are references to other geometries
for c in obj.iter("{*}component"):
mesh_index = c.attrib["objectid"]
transform = _attrib_to_transform(c.attrib)
components[index].append((mesh_index, transform))

# if this references another file as the `path` attrib
path = next(
(v.strip("/") for k, v in c.attrib.items() if k.endswith("path")),
None,
)
if path is not None and path in archive:
archive[path].seek(0)
name = unique_name(
obj.attrib.get("name", str(mesh_index)),
consumed_names,
consumed_counts,
)
consumed_names.add(name)
# store name reference on the index
id_name[mesh_index] = name

for _, m in etree.iterparse(
archive[path], tag=("{*}mesh"), events=("end",)
):
v, f = _read_mesh(m)
v_seq[mesh_index].append(v)
f_seq[mesh_index].append(f)
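# Hedged illustration of the path lookup above: in a file using the 3MF
# production extension, a component stored in another model file looks
# roughly like
#   <component objectid="2" p:path="/3D/Objects/part.model"/>
# and after namespace expansion the raw attribute key ends with "path",
# which is all the `k.endswith("path")` match relies on (namespace URI
# written from memory, for illustration only):
_attrib = {
    "objectid": "2",
    "{http://schemas.microsoft.com/3dmanufacturing/production/2015/06}path": "/3D/Objects/part.model",
}
assert next(
    (v.strip("/") for k, v in _attrib.items() if k.endswith("path")), None
) == "3D/Objects/part.model"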

# parse build
if "build" in obj.tag:
# scene graph information stored here, aka "build" the scene
@@ -109,19 +141,15 @@ def load_3MF(file_obj, postprocess=True, **kwargs):
# the index of the geometry this item instantiates
build_items.append((item.attrib["objectid"], transform))

# free resources
obj.clear()
obj.getparent().remove(obj)
del obj

# have one mesh per 3MF object
# one mesh per geometry ID, store as kwargs for the object
meshes = {}
for gid in v_seq.keys():
v, f = util.append_faces(v_seq[gid], f_seq[gid])
name = id_name[gid]
meshes[name] = {
"vertices": v_seq[gid],
"faces": f_seq[gid],
"vertices": v,
"faces": f,
"metadata": metadata.copy(),
}
meshes[name].update(kwargs)
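# Hedged sketch of what `util.append_faces` does with the per-id sequences:
# stack the vertex blocks and offset each face block by the number of
# vertices that precede it (assuming that is indeed its behavior).
_va, _fa = np.zeros((3, 3)), np.array([[0, 1, 2]])
_vb, _fb = np.ones((3, 3)), np.array([[0, 1, 2]])
_v, _f = util.append_faces([_va, _vb], [_fa, _fb])
assert len(_v) == 6 and (_f[1] == [3, 4, 5]).all()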
@@ -143,7 +171,7 @@ def load_3MF(file_obj, postprocess=True, **kwargs):
# flatten the scene structure and simplify to
# a single unique node per instance
graph_args = []
parents = collections.defaultdict(set)
parents = defaultdict(set)
for path in graph.multigraph_paths(G=g, source="world"):
# collect all the transform on the path
transforms = graph.multigraph_collect(G=g, traversal=path, attrib="matrix")
@@ -157,8 +185,9 @@ def load_3MF(file_obj, postprocess=True, **kwargs):
last = path[-1][0]
# if someone included an undefined component, skip it
if last not in id_name:
log.debug(f"id {last} included but not defined!")
log.warning(f"id {last} included but not defined!")
continue

# frame names unique
name = id_name[last] + util.unique_id()
# index in meshes
33 changes: 14 additions & 19 deletions trimesh/path/polygons.py
@@ -3,6 +3,7 @@
from shapely.geometry import Polygon

from .. import bounds, geometry, graph, grouping
from ..boolean import reduce_cascade
from ..constants import log
from ..constants import tol_path as tol
from ..transformations import transform_points
@@ -162,14 +163,14 @@ def edges_to_polygons(edges: NDArray[int64], vertices: NDArray[float64]):
# find which polygons contain which other polygons
roots, tree = enclosure_tree(polygons)

# generate list of polygons with proper interiors
complete = []
for root in roots:
interior = list(tree[root].keys())
shell = polygons[root].exterior.coords
holes = [polygons[i].exterior.coords for i in interior]
complete.append(Polygon(shell=shell, holes=holes))
return complete
# generate polygons with proper interiors
return [
Polygon(
shell=polygons[root].exterior,
holes=[polygons[i].exterior for i in tree[root].keys()],
)
for root in roots
]
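# Hedged illustration of why the `.exterior` rings can be passed directly:
# shapely's Polygon accepts LinearRings, not just coordinate sequences, for
# both the shell and the holes.
_outer = Polygon([(0, 0), (4, 0), (4, 4), (0, 4)])
_inner = Polygon([(1, 1), (2, 1), (2, 2), (1, 2)])
_with_hole = Polygon(shell=_outer.exterior, holes=[_inner.exterior])
assert _with_hole.area == _outer.area - _inner.area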


def polygons_obb(polygons: Iterable[Polygon]):
Expand Down Expand Up @@ -864,17 +865,11 @@ def projected(
return polygons[0]
elif len(polygons) == 0:
return None
# inflate each polygon before unioning to remove zero-size
# gaps then deflate the result after unioning by the same amount
# note the following provides a 25% speedup but needs
# more testing to see if it deflates to a decent looking
# result:
# polygon = ops.unary_union(
# [p.buffer(padding,
# join_style=2,
# mitre_limit=1.5)
# for p in polygons]).buffer(-padding)
return ops.unary_union([p.buffer(padding) for p in polygons]).buffer(-padding)

# in my tests this was substantially faster than `shapely.ops.unary_union`
return (
reduce_cascade(lambda a, b: a.union(b), polygons).buffer(padding).buffer(-padding)
)
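# Hedged sketch of the cascaded-reduction idea behind `reduce_cascade` (not
# the library implementation): combine items pairwise in a balanced tree
# rather than one long left fold, which keeps intermediate unions small.
def _cascade_sketch(operation, items):
    items = list(items)
    if len(items) == 1:
        return items[0]
    mid = len(items) // 2
    return operation(
        _cascade_sketch(operation, items[:mid]),
        _cascade_sketch(operation, items[mid:]),
    )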


def second_moments(polygon: Polygon, return_centered=False):