
Commit

remove DXF xrecord and nicer names in split_scene
mikedh committed Oct 31, 2018
1 parent 842652a commit 4e12d6a
Showing 8 changed files with 108 additions and 144 deletions.
26 changes: 0 additions & 26 deletions tests/test_dxf.py
@@ -68,32 +68,6 @@ def test_spline(self):
assert len(d.entities[0].points) == len(r.entities[0].points)
assert len(d.entities[0].knots) == len(r.entities[0].knots)

def test_xrecord(self):
# data to store to test export / import round trip
data = {'thangs': 'poppin',
'pnts': g.np.arange(9).reshape((-1, 3))}

# get a drawing and add our data to metadata
d = g.get_mesh('2D/wrench.dxf')
d.metadata.update(data)

# get a path we can write
temp_name = g.tempfile.NamedTemporaryFile(
suffix='.dxf', delete=False).name

# export as a DXF file, which should put our
# custom data into an XRecord
d.export(temp_name, include_metadata=True)

# reload from export
r = g.trimesh.load(temp_name)

# check numpy round trip
assert g.np.allclose(r.metadata['pnts'],
data['pnts'])
# check string roundtrip
assert r.metadata['thangs'] == 'poppin'

def test_versions(self):
"""
DXF files have a bajillion versions, so test against
3 changes: 3 additions & 0 deletions tests/test_scene.py
@@ -40,7 +40,9 @@ def test_scene(self):
len(scene_base.geometry))

for s in [scene_split, scene_base]:
pre = s.md5()
assert len(s.geometry) > 0
assert s.is_valid

flattened = s.graph.to_flattened()
g.json.dumps(flattened)
@@ -54,6 +56,7 @@ def test_scene(self):
assert g.trimesh.util.is_shape(s.triangles, (-1, 3, 3))
assert len(s.triangles) == len(s.triangles_node)

assert s.md5() == pre
assert s.md5() is not None

assert len(s.duplicate_nodes) > 0
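
The added lines bracket the property accesses with an md5 check, so any accessor that mutates the scene as a side effect now fails the test. A minimal sketch of the same guard pattern outside the test suite, assuming the Scene API of this trimesh era (Scene.md5, trimesh.creation.box):

import trimesh

# build a small scene from a primitive box
scene = trimesh.Scene(trimesh.creation.box())

# hash the scene before touching any cached properties
pre = scene.md5()

# accessors are expected to be read-only and leave the hash unchanged
_ = scene.bounds
_ = scene.duplicate_nodes
_ = scene.triangles

assert scene.md5() == pre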
140 changes: 38 additions & 102 deletions trimesh/path/io/dxf.py
@@ -60,20 +60,6 @@
get_resource('dxf.json.template')).items()}


def get_key(blob, field, code):
try:
line = blob[np.nonzero(blob[:, 1] == field)[0][0] + 1]
except IndexError:
return None
if line[0] == code:
try:
return int(line[1])
except ValueError:
return line[1]
else:
return None


def load_dxf(file_obj, **kwargs):
"""
Load a DXF file to a dictionary containing vertices and
@@ -88,56 +74,6 @@ def load_dxf(file_obj, **kwargs):
result: dict, keys are entities, vertices and metadata
"""

def get_metadata():
"""
Get metadata from DXF objects section of the file.
Returns
----------
metadata : dict
Any available metadata stored in XRecord
"""
metadata = {}
# save file version info
metadata['ACADVER'] = get_key(header_blob, '$ACADVER', '1')

# get the section which contains objects in the DXF file
obj_start = np.nonzero(blob[:, 1] == 'OBJECTS')[0]

if len(obj_start) == 0:
return metadata
obj_start = obj_start[0]

obj_end = endsec[np.searchsorted(endsec, obj_start)]
obj_blob = blob[obj_start:obj_end]

# the index of xrecords are one past the group code key
xrecords = np.nonzero((
obj_blob == ['100', 'ACDBXRECORD']).all(axis=1))[0] + 1

# if there are no XRecords return
if len(xrecords) == 0:
return metadata

# resplit the file data without upper() to preserve case
blob_lower = np.array(str.splitlines(raw)).reshape((-1, 2))
# newlines and split should never be affected by upper()
assert len(blob_lower) == len(blob)

# the likely exceptions are related to JSON decoding
try:
# loop through xrecords by group code
for code, data in blob_lower[obj_start:obj_end][xrecords]:
if code == str(XRECORD_METADATA):
metadata.update(json.loads(data))
# we could store xrecords in the else here
# but they have a lot of garbage so don't
# metadata['XRECORD_' + code] = data
except BaseException:
log.error('failed to load metadata!', exc_info=True)

return metadata

def info(e):
"""
Pull metadata based on group code, and return as a dict.
@@ -367,7 +303,7 @@ def convert_bspline(e):
# no converter to ASCII DXF available
raise ValueError('binary DXF not supported!')
else:
# convert to R14 ASCII DXF
# convert binary DXF to R14 ASCII DXF
raw = _teigha_convert(raw, extension='dxf')
else:
# we've been passed bytes that don't have the
@@ -392,26 +328,30 @@ def convert_bspline(e):
entity_end = endsec[np.searchsorted(endsec, entity_start)]
entity_blob = blob[entity_start:entity_end]

# try to load path metadata from xrecords stored in DXF
try:
metadata = get_metadata()
except BaseException:
log.error('failed to extract metadata!',
exc_info=True)
metadata = {}
# store some properties from the DXF header
metadata = {'DXF_HEADER': {}}

for key in ['$DIMSCALE', '$DIMUNIT', '$INSUNITS', '$LUNITS']:
value = get_key(header_blob,
key,
'70')
if value is not None:
metadata['DXF_HEADER'][key] = value

# store unit data pulled from the header of the DXF
# prefer LUNITS over INSUNITS
# I couldn't find a table for LUNITS values but they
# look like they are 0-indexed versions of
# the INSUNITS keys, so for now offset the key value
for offset, key in [(0, '$INSUNITS'),
(-1, '$LUNITS')]:
for offset, key in [(-1, '$LUNITS'),
(0, '$INSUNITS')]:
# get the key from the header blob
units = get_key(header_blob, key, '70')
# if it exists add the offset
if units is not None:
units += offset
if units is None:
continue
metadata[key] = units
units += offset
# if the key is in our list of units store it
if units in _DXF_UNITS:
metadata['units'] = _DXF_UNITS[units]
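
The loop above reads $LUNITS and $INSUNITS from the header and shifts $LUNITS by -1 so both index into the same _DXF_UNITS table. A standalone sketch of that offset-and-lookup step, using a hypothetical header dict and a two-entry stand-in for the real table:

# hypothetical two-entry stand-in for the INSUNITS code -> unit name table
UNITS_TABLE = {1: 'inches', 4: 'millimeters'}

def units_from_header(header):
    units = None
    # $LUNITS appears to be 1-indexed relative to $INSUNITS,
    # so shift it before looking it up in the shared table
    for offset, key in [(-1, '$LUNITS'), (0, '$INSUNITS')]:
        value = header.get(key)
        if value is None:
            continue
        value += offset
        if value in UNITS_TABLE:
            units = UNITS_TABLE[value]
    return units

# a header that only set $INSUNITS to 4 (millimeters)
assert units_from_header({'$INSUNITS': 4}) == 'millimeters'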
@@ -498,7 +438,7 @@ def convert_bspline(e):
return result


def export_dxf(path, include_metadata=False):
def export_dxf(path):
"""
Export a 2D path object to a DXF file
@@ -667,28 +607,6 @@ def convert_generic(entity, vertices):
"""
return convert_line(entity, vertices)

def convert_metadata():
"""
Save path metadata as a DXF Xrecord object.
"""
if (not include_metadata) or len(path.metadata) == 0:
return ''
# dump metadata to compact JSON
# make sure there are no newlines to break DXF
# util.jsonify will be able to convert numpy arrays
as_json = util.jsonify(
path.metadata,
separators=(',', ':')).replace('\n', ' ')

# create an XRECORD for our use
xrecord = TEMPLATES['xrecord'].substitute({
'INDEX': XRECORD_METADATA,
'DATA': as_json})
# add the XRECORD to an objects section
result = TEMPLATES['objects'].substitute({
'OBJECTS': xrecord})
return result

# make sure we're not losing a ton of
# precision in the string conversion
np.set_printoptions(precision=12)
@@ -717,12 +635,10 @@ def convert_metadata():
entities = TEMPLATES['entities'].substitute({
'ENTITIES': entities_str})
footer = TEMPLATES['footer'].substitute()
# metadata encoded as objects section
objects = convert_metadata()

# filter out empty sections
sections = [i for i in [header,
entities,
objects,
footer]
if len(i) > 0]

@@ -759,6 +675,24 @@ def load_dwg(file_obj, **kwargs):
return result


def get_key(blob, field, code):
"""
Given a loaded (n, 2) blob and a field name
get a value by code.
"""
try:
line = blob[np.nonzero(blob[:, 1] == field)[0][0] + 1]
except IndexError:
return None
if line[0] == code:
try:
return int(line[1])
except ValueError:
return line[1]
else:
return None
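
get_key was moved here, below load_dwg, unchanged. A minimal usage sketch, with a hypothetical slice of a DXF HEADER section laid out the way load_dxf reshapes the raw text into an (n, 2) blob of (group code, value) pairs:

import numpy as np

# hypothetical header fragment: alternating group code / value lines
header_text = '9\n$INSUNITS\n70\n4\n9\n$ACADVER\n1\nAC1014'
blob = np.array(header_text.splitlines()).reshape((-1, 2))

assert get_key(blob, '$INSUNITS', '70') == 4        # cast to int
assert get_key(blob, '$ACADVER', '1') == 'AC1014'   # left as a string
assert get_key(blob, '$MISSING', '70') is None      # field not present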


def _teigha_convert(data, extension='dwg'):
"""
Convert any DXF/DWG to R14 ASCII DXF using Teigha Converter.
Expand Down Expand Up @@ -836,11 +770,13 @@ def _teigha_convert(data, extension='dwg'):


# the DWG to DXF converter
# it was renamed at some point but is the same converter
for _name in ['ODAFileConverter',
'TeighaFileConverter']:
_teigha = find_executable(_name)
if _teigha is not None:
break

# suppress X11 output
_xvfb_run = find_executable('xvfb-run')

12 changes: 9 additions & 3 deletions trimesh/path/simplify.py
@@ -165,6 +165,9 @@ def merge_colinear(points, scale):
points = np.asanyarray(points, dtype=np.float64)
scale = float(scale)

if len(points.shape) != 2 or points.shape[1] != 2:
raise ValueError('only for 2D points!')

# if there's less than 3 points nothing to merge
if len(points) < 3:
return points.copy()
@@ -185,12 +188,15 @@ def merge_colinear(points, scale):
# if we have points A B C D
# and direction vectors A-B, B-C, etc
# these will be perpendicular to the vectors A-C, B-D, etc
perpendicular = (points[2:] - points[:-2]).T[::-1].T
perpendicular /= np.linalg.norm(perpendicular, axis=1).reshape((-1, 1))
perp = (points[2:] - points[:-2]).T[::-1].T
perp_norm = np.linalg.norm(perp, axis=1)
perp_nonzero = perp_norm > tol.merge
perp[perp_nonzero] /= perp_norm[perp_nonzero].reshape((-1, 1))

# find the projection of each direction vector
# onto the perpendicular vector
projection = np.abs(diagonal_dot(perpendicular, direction[:-1]))
projection = np.abs(diagonal_dot(perp,
direction[:-1]))

projection_ratio = np.max((projection / direction_norm[1:],
projection / direction_norm[:-1]), axis=0)
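
The rewritten block only normalizes rows of perp with a nonzero length, so degenerate (zero-length) difference vectors from coincident points no longer produce a divide-by-zero and NaN rows. A standalone sketch of the same guard, with TOL standing in for trimesh's tol.merge:

import numpy as np

TOL = 1e-8  # stand-in for trimesh's tol.merge tolerance

vectors = np.array([[3.0, 4.0],
                    [0.0, 0.0],   # degenerate row from coincident points
                    [0.0, 2.0]])

# divide only the rows whose norm is above the tolerance
norm = np.linalg.norm(vectors, axis=1)
nonzero = norm > TOL
vectors[nonzero] /= norm[nonzero].reshape((-1, 1))

assert np.allclose(norm[nonzero], [5.0, 2.0])
assert not np.isnan(vectors).any()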
4 changes: 2 additions & 2 deletions trimesh/resources/helpers/dxf_helper.py
@@ -43,5 +43,5 @@ def read_files(path):
write_json(t)

# dump JSON to files for editing
#$t = get_json()
#$write_files(t)
# $t = get_json()
# $write_files(t)