Skip to content

Commit

Permalink
Cleanup commits
Browse files Browse the repository at this point in the history
  • Loading branch information
Giulero committed Mar 8, 2024
1 parent 6191da9 commit 924da1f
Show file tree
Hide file tree
Showing 3 changed files with 25 additions and 30 deletions.
2 changes: 1 addition & 1 deletion src/adam/model/model.py
Original file line number Diff line number Diff line change
Expand Up @@ -124,7 +124,7 @@ def get_joints_chain(self, root: str, target: str) -> List[Joint]:
List[Joint]: the list of the joints
"""

if target not in list(self.links) and target not in list(self.frames):
if target not in list(self.links) and target not in list(self.tree.graph):
        raise ValueError(f"{target} is not in the robot model.")

if target == root:
Expand Down
10 changes: 3 additions & 7 deletions src/adam/model/std_factories/std_model.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,10 +17,7 @@ def urdf_remove_sensors_tags(xml_string):
for sensors_tag in root.findall("sensor"):
root.remove(sensors_tag)

# Convert the modified XML back to a string
modified_xml_string = ET.tostring(root)

return modified_xml_string
return ET.tostring(root)


class URDFModelFactory(ModelFactory):
Expand All @@ -44,9 +41,8 @@ def __init__(self, path: str, math: SpatialMath):
# to have a useless and noisy warning, let's remove before hands all the sensor elements,
# that anyhow are not parsed by urdf_parser_py or adam
# See https://github.com/ami-iit/ADAM/issues/59
xml_file = open(path, "r")
xml_string = xml_file.read()
xml_file.close()
with open(path, "r") as xml_file:
xml_string = xml_file.read()
xml_string_without_sensors_tags = urdf_remove_sensors_tags(xml_string)
self.urdf_desc = urdf_parser_py.urdf.URDF.from_xml_string(
xml_string_without_sensors_tags
Expand Down
43 changes: 21 additions & 22 deletions src/adam/model/tree.py
Original file line number Diff line number Diff line change
Expand Up @@ -102,39 +102,38 @@ def reduce(self, considered_joint_names: List[str]) -> "Tree":
for joint in fixed_joints:
joint.type = "fixed"

for f_joint in fixed_joints:
merged_node = self.graph[f_joint.parent]
merged_neighbors = self.graph[f_joint.child]
for fixed_j in fixed_joints:
saved_node = self.graph[fixed_j.parent]
removing_node = self.graph[fixed_j.child]

merged_node.children.remove(merged_neighbors)
merged_node.children.extend(merged_neighbors.children)
saved_node.children.remove(removing_node)
saved_node.children.extend(removing_node.children)
# update the arcs
merged_node.arcs.remove(f_joint)
merged_node.arcs.extend(merged_neighbors.arcs)
saved_node.arcs.remove(fixed_j)
saved_node.arcs.extend(removing_node.arcs)

merged_node.link = merged_node.link.lump(
other=merged_neighbors.link, joint=f_joint
saved_node.link = saved_node.link.lump(
other=removing_node.link, joint=fixed_j
)

merged_joint = merged_node.parent_arc
removed_joint = merged_neighbors.parent_arc
merged_joint = saved_node.parent_arc
removed_joint = removing_node.parent_arc
# update the parent arc of the merged node
merged_node.parent_arc = merged_node.parent_arc.lump(removed_joint)
# saved_node.parent_arc = saved_node.parent_arc.lump(removed_joint)

# we need to update the parents and child on the joints in fixed_joints
for joint in self.get_joint_list():
if joint.parent == merged_neighbors.name:
joint.parent = merged_node.name
if joint.child == merged_neighbors.name:
joint.child = merged_node.name
if joint.parent == removing_node.name:
joint.parent = saved_node.name
if joint.child == removing_node.name:
joint.child = saved_node.name

for child in merged_node.children:
for child in saved_node.children:
child.parent = saved_node.link
child.parent_arc = saved_node.parent_arc

child.parent = merged_node.link
child.parent_arc = merged_node.parent_arc

self.graph.pop(merged_neighbors.name)
self.graph[merged_node.name] = merged_node
self.graph.pop(removing_node.name)
self.graph[saved_node.name] = saved_node

if {joint.name for joint in self.get_joint_list()} != set(
considered_joint_names
Expand Down

0 comments on commit 924da1f

Please sign in to comment.