diff --git a/omnigibson/object_states/particle_modifier.py b/omnigibson/object_states/particle_modifier.py
index 8f62f56f9..2502a6f96 100644
--- a/omnigibson/object_states/particle_modifier.py
+++ b/omnigibson/object_states/particle_modifier.py
@@ -1120,7 +1120,7 @@ def _modify_particles(self, system):
                     system.create_attachment_group(obj=self.obj)
                 avg_scale = np.cbrt(np.product(self.obj.scale))
                 scales = system.sample_scales_by_group(group=group, n=len(start_points))
-                cuboid_dimensions = scales * system.particle_object.extent.reshape(1, 3) * avg_scale
+                cuboid_dimensions = scales * system.particle_object.aabb_extent.reshape(1, 3) * avg_scale
             else:
                 scales = None
                 cuboid_dimensions = np.zeros(3)
diff --git a/omnigibson/prims/entity_prim.py b/omnigibson/prims/entity_prim.py
index 6dbe51444..8e332d887 100644
--- a/omnigibson/prims/entity_prim.py
+++ b/omnigibson/prims/entity_prim.py
@@ -12,6 +12,7 @@
 from omnigibson.prims.xform_prim import XFormPrim
 from omnigibson.utils.constants import PrimType, GEOM_TYPES, JointType, JointAxis
 from omnigibson.utils.ui_utils import suppress_omni_log
+from omnigibson.utils.usd_utils import PoseAPI
 from omnigibson.macros import gm, create_module_macros
@@ -620,6 +621,7 @@ def set_joint_positions(self, positions, indices=None, normalized=False, drive=F
             self._articulation_view.set_joint_position_targets(positions, joint_indices=indices)
         else:
             self._articulation_view.set_joint_positions(positions, joint_indices=indices)
+            PoseAPI.invalidate()
 
     def set_joint_velocities(self, velocities, indices=None, normalized=False, drive=False):
         """
@@ -920,6 +922,7 @@ def set_position_orientation(self, position=None, orientation=None):
         if orientation is not None:
             orientation = np.asarray(orientation)[None, [3, 0, 1, 2]]
         self._articulation_view.set_world_poses(position, orientation)
+        PoseAPI.invalidate()
 
     def get_position_orientation(self):
         # If the simulation isn't running, we should read from this prim's XForm (object-level) properties directly
@@ -950,6 +953,7 @@ def set_local_pose(self, position=None, orientation=None):
         if orientation is not None:
             orientation = np.asarray(orientation)[None, [3, 0, 1, 2]]
         self._articulation_view.set_local_poses(position, orientation)
+        PoseAPI.invalidate()
 
     def get_local_pose(self):
         # If the simulation isn't running, we should read from this prim's XForm (object-level) properties directly
diff --git a/omnigibson/prims/geom_prim.py b/omnigibson/prims/geom_prim.py
index 849ea86fd..cf31ca583 100644
--- a/omnigibson/prims/geom_prim.py
+++ b/omnigibson/prims/geom_prim.py
@@ -7,6 +7,7 @@
 from omnigibson.macros import gm
 from omnigibson.prims.xform_prim import XFormPrim
 from omnigibson.utils.python_utils import assert_valid_key
+from omnigibson.utils.usd_utils import PoseAPI
 import omnigibson.utils.transform_utils as T
@@ -130,7 +131,7 @@ def points(self):
         mesh = self.prim
         mesh_type = mesh.GetPrimTypeInfo().GetTypeName()
         if mesh_type == "Mesh":
-            return self.prim.GetAttribute("points").Get()
+            return np.array(self.prim.GetAttribute("points").Get())
 
         # Generate a trimesh for other shapes
         if mesh_type == "Sphere":
@@ -155,8 +156,8 @@ def points(self):
             raise ValueError(f"Cannot compute volume for mesh of type: {mesh_type}")
 
         # Return the vertices of the trimesh
-        return mesh.vertices
-
+        return np.array(mesh.vertices)
+
     @property
     def points_in_parent_frame(self):
         points = self.points
@@ -168,7 +169,42 @@ def points_in_parent_frame(self):
         points_rotated = np.dot(T.quat2mat(orientation), points_scaled.T).T
         points_transformed = points_rotated + position
         return points_transformed
-
+
+    @property
+    def aabb(self):
+        world_pose_w_scale = PoseAPI.get_world_pose_with_scale(self.prim_path)
+
+        # Transform self.points into the world frame
+        points = self.points
+        points_homogeneous = np.hstack((points, np.ones((points.shape[0], 1))))
+        points_transformed = (points_homogeneous @ world_pose_w_scale.T)[:, :3]
+
+        aabb_lo = np.min(points_transformed, axis=0)
+        aabb_hi = np.max(points_transformed, axis=0)
+        return aabb_lo, aabb_hi
+
+    @property
+    def aabb_extent(self):
+        """
+        Bounding box extent of this geom prim
+
+        Returns:
+            3-array: (x,y,z) bounding box extent
+        """
+        min_corner, max_corner = self.aabb
+        return max_corner - min_corner
+
+    @property
+    def aabb_center(self):
+        """
+        Bounding box center of this geom prim
+
+        Returns:
+            3-array: (x,y,z) bounding box center
+        """
+        min_corner, max_corner = self.aabb
+        return (max_corner + min_corner) / 2.0
+
     @cached_property
     def extent(self):
         """
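Note (editorial illustration, not part of the patch): the new GeomPrim.aabb boils down to transforming the prim's local points by its scaled world transform and taking per-axis extrema; aabb_extent and aabb_center are then just the difference and average of the two corners. A minimal, self-contained numpy sketch of that computation, with a made-up 4x4 transform standing in for PoseAPI.get_world_pose_with_scale:

```python
import numpy as np

# Hypothetical local-frame mesh points and a scaled world transform
# (uniform scale of 2, then a translation) standing in for what
# PoseAPI.get_world_pose_with_scale(prim_path) would return.
points = np.array([[-0.5, -0.5, -0.5],
                   [ 0.5,  0.5,  0.5],
                   [ 0.5, -0.5,  0.5]])
world_pose_w_scale = np.diag([2.0, 2.0, 2.0, 1.0])
world_pose_w_scale[:3, 3] = [1.0, 0.0, 3.0]

# Same steps as the new GeomPrim.aabb: homogenize, transform, take per-axis min/max.
points_h = np.hstack((points, np.ones((points.shape[0], 1))))
points_world = (points_h @ world_pose_w_scale.T)[:, :3]
aabb_lo, aabb_hi = points_world.min(axis=0), points_world.max(axis=0)

print(aabb_hi - aabb_lo)        # aabb_extent -> [2. 2. 2.]
print((aabb_hi + aabb_lo) / 2)  # aabb_center -> [1. 0. 3.]
```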
diff --git a/omnigibson/prims/joint_prim.py b/omnigibson/prims/joint_prim.py
index 61133eed7..19d64bb8d 100644
--- a/omnigibson/prims/joint_prim.py
+++ b/omnigibson/prims/joint_prim.py
@@ -4,7 +4,7 @@
 import omnigibson.lazy as lazy
 from omnigibson.macros import create_module_macros
 from omnigibson.prims.prim_base import BasePrim
-from omnigibson.utils.usd_utils import create_joint
+from omnigibson.utils.usd_utils import PoseAPI, create_joint
 from omnigibson.utils.constants import JointType, JointAxis
 from omnigibson.utils.python_utils import assert_valid_key
 import omnigibson.utils.transform_utils as T
@@ -736,6 +736,7 @@ def set_pos(self, pos, normalized=False, drive=False):
         # Set the DOF(s) in this joint
         if not drive:
             self._articulation_view.set_joint_positions(positions=pos, joint_indices=self.dof_indices)
+            PoseAPI.invalidate()
 
         # Also set the target
         self._articulation_view.set_joint_position_targets(positions=pos, joint_indices=self.dof_indices)
diff --git a/omnigibson/prims/rigid_prim.py b/omnigibson/prims/rigid_prim.py
index defeacf93..96a9bcb71 100644
--- a/omnigibson/prims/rigid_prim.py
+++ b/omnigibson/prims/rigid_prim.py
@@ -1,5 +1,5 @@
 from functools import cached_property
-from scipy.spatial import ConvexHull
+import scipy
 import numpy as np
 
 import omnigibson as og
@@ -9,7 +9,7 @@
 from omnigibson.prims.geom_prim import CollisionGeomPrim, VisualGeomPrim
 from omnigibson.utils.constants import GEOM_TYPES
 from omnigibson.utils.sim_utils import CsRawData
-from omnigibson.utils.usd_utils import get_mesh_volume_and_com, check_extent_radius_ratio
+from omnigibson.utils.usd_utils import PoseAPI, get_mesh_volume_and_com, check_extent_radius_ratio
 import omnigibson.utils.transform_utils as T
 from omnigibson.utils.ui_utils import create_module_logger
@@ -302,6 +302,7 @@ def set_position_orientation(self, position=None, orientation=None):
                 f"{self.prim_path} desired orientation {orientation} is not a unit quaternion."
             orientation = np.asarray(orientation)[None, [3, 0, 1, 2]]
         self._rigid_prim_view.set_world_poses(positions=position, orientations=orientation)
+        PoseAPI.invalidate()
 
     def get_position_orientation(self):
         # Return cached pose if we're kinematic-only
@@ -328,6 +329,7 @@ def set_local_pose(self, position=None, orientation=None):
         if orientation is not None:
             orientation = np.asarray(orientation)[None, [3, 0, 1, 2]]
         self._rigid_prim_view.set_local_poses(position, orientation)
+        PoseAPI.invalidate()
 
     def get_local_pose(self):
         # Return cached pose if we're kinematic-only
@@ -624,7 +626,7 @@ def _compute_points_on_convex_hull(self, visual):
         points = np.concatenate(points, axis=0)
 
         try:
-            hull = ConvexHull(points)
+            hull = scipy.spatial.ConvexHull(points)
             return points[hull.vertices, :]
         except scipy.spatial.qhull.QhullError:
             # Handle the case where a convex hull cannot be formed (e.g., collinear points)
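Note (editorial illustration, not part of the patch): the PoseAPI.invalidate() hooks added to these PhysX-view setters are what make a freshly written pose visible to the next read without stepping the simulation. A hedged usage sketch of that pattern — it assumes an OmniGibson environment and robot already exist (e.g., as set up in tests/test_robot_states.py below), so it is not runnable on its own:

```python
import numpy as np

# Assumes an OmniGibson Environment has already been created and `robot = env.robots[0]`,
# e.g. via setup_environment() in tests/test_robot_states.py. Illustrative only.
robot.set_position_orientation(position=[1.0, 2.0, 0.0])

# No og.sim.step() in between: because the setter invalidated PoseAPI, the read
# below lazily re-syncs poses and already reflects the teleport.
pos, _ = robot.get_position_orientation()
assert np.allclose(pos, [1.0, 2.0, 0.0])
```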
diff --git a/omnigibson/prims/xform_prim.py b/omnigibson/prims/xform_prim.py
index dccdeed5a..049301e15 100644
--- a/omnigibson/prims/xform_prim.py
+++ b/omnigibson/prims/xform_prim.py
@@ -6,8 +6,10 @@
 from omnigibson.prims.prim_base import BasePrim
 from omnigibson.prims.material_prim import MaterialPrim
 from omnigibson.utils.transform_utils import quat2euler
+from omnigibson.utils.usd_utils import PoseAPI
+import omnigibson.utils.transform_utils as T
 from scipy.spatial.transform import Rotation as R
-
+from omnigibson.macros import gm
 
 class XFormPrim(BasePrim):
     """
@@ -153,32 +155,20 @@ def set_position_orientation(self, position=None, orientation=None):
             Default is None, which means left unchanged.
         """
         current_position, current_orientation = self.get_position_orientation()
+
         position = current_position if position is None else np.array(position, dtype=float)
         orientation = current_orientation if orientation is None else np.array(orientation, dtype=float)
-        orientation = orientation[[3, 0, 1, 2]]  # Flip from x,y,z,w to w,x,y,z
         assert np.isclose(np.linalg.norm(orientation), 1, atol=1e-3), \
             f"{self.prim_path} desired orientation {orientation} is not a unit quaternion."
-        mat = lazy.pxr.Gf.Transform()
-        mat.SetRotation(lazy.pxr.Gf.Rotation(lazy.pxr.Gf.Quatd(*orientation)))
-        mat.SetTranslation(lazy.pxr.Gf.Vec3d(*position))
+        my_world_transform = T.pose2mat((position, orientation))
 
-        # mat.SetScale(lazy.pxr.Gf.Vec3d(*(self.get_world_scale() / self.scale)))
-        # TODO (eric): understand why this (mat.setScale) works - this works empirically but it's unclear why.
-        mat.SetScale(lazy.pxr.Gf.Vec3d(*(self.scale.astype(np.float64))))
-        my_world_transform = np.transpose(mat.GetMatrix())
+        parent_prim = lazy.omni.isaac.core.utils.prims.get_prim_parent(self._prim)
+        parent_path = str(parent_prim.GetPath())
+        parent_world_transform = PoseAPI.get_world_pose_with_scale(parent_path)
 
-        parent_world_tf = lazy.pxr.UsdGeom.Xformable(lazy.omni.isaac.core.utils.prims.get_prim_parent(self._prim)).ComputeLocalToWorldTransform(lazy.pxr.Usd.TimeCode.Default())
-        parent_world_transform = np.transpose(parent_world_tf)
-
-        local_transform = np.matmul(np.linalg.inv(parent_world_transform), my_world_transform)
-        transform = lazy.pxr.Gf.Transform()
-        transform.SetMatrix(lazy.pxr.Gf.Matrix4d(np.transpose(local_transform)))
-        calculated_translation = transform.GetTranslation()
-        calculated_orientation = transform.GetRotation().GetQuat()
-        self.set_local_pose(
-            position=np.array(calculated_translation), orientation=lazy.omni.isaac.core.utils.rotations.gf_quat_to_np_array(calculated_orientation)[[1, 2, 3, 0]]  # Flip from w,x,y,z to x,y,z,w
-        )
+        local_transform = np.linalg.inv(parent_world_transform) @ my_world_transform
+        self.set_local_pose(*T.mat2pose(local_transform))
 
     def get_position_orientation(self):
         """
@@ -189,12 +179,7 @@ def get_position_orientation(self):
         Returns:
             2-tuple:
                 - 3-array: (x,y,z) position in the world frame
                 - 4-array: (x,y,z,w) quaternion orientation in the world frame
         """
-        prim_tf = lazy.pxr.UsdGeom.Xformable(self._prim).ComputeLocalToWorldTransform(lazy.pxr.Usd.TimeCode.Default())
-        transform = lazy.pxr.Gf.Transform()
-        transform.SetMatrix(prim_tf)
-        position = transform.GetTranslation()
-        orientation = transform.GetRotation().GetQuat()
-        return np.array(position), lazy.omni.isaac.core.utils.rotations.gf_quat_to_np_array(orientation)[[1, 2, 3, 0]]
+        return PoseAPI.get_world_pose(self._prim_path)
 
     def set_position(self, position):
         """
@@ -266,9 +251,8 @@ def get_local_pose(self):
                 - 3-array: (x,y,z) position in the local frame
                 - 4-array: (x,y,z,w) quaternion orientation in the local frame
         """
-        xform_translate_op = self.get_attribute("xformOp:translate")
-        xform_orient_op = self.get_attribute("xformOp:orient")
-        return np.array(xform_translate_op), lazy.omni.isaac.core.utils.rotations.gf_quat_to_np_array(xform_orient_op)[[1, 2, 3, 0]]
+        pos, ori = lazy.omni.isaac.core.utils.xforms.get_local_pose(self.prim_path)
+        return pos, ori[[1, 2, 3, 0]]
 
     def set_local_pose(self, position=None, orientation=None):
         """
@@ -279,7 +263,7 @@ def set_local_pose(self, position=None, orientation=None):
                 (with respect to its parent prim). Default is None, which means left unchanged.
             orientation (None or 4-array): if specified, (x,y,z,w) quaternion orientation in the local frame of the prim
                 (with respect to its parent prim). Default is None, which means left unchanged.
-        """
+        """
         properties = self.prim.GetPropertyNames()
         if position is not None:
             position = lazy.pxr.Gf.Vec3d(*np.array(position, dtype=float))
@@ -300,6 +284,14 @@ def set_local_pose(self, position=None, orientation=None):
             else:
                 rotq = lazy.pxr.Gf.Quatd(*orientation)
             xform_op.Set(rotq)
+        PoseAPI.invalidate()
+        if gm.ENABLE_FLATCACHE:
+            # If flatcache is on, make sure the USD local pose is synced to the fabric local pose.
+            # Ideally we should call usdrt's set local pose directly, but there is no such API.
+            # The only available API is SetLocalXformFromUsd, so we update USD first, and then sync to fabric.
+            xformable_prim = lazy.usdrt.Rt.Xformable(lazy.omni.isaac.core.utils.prims.get_prim_at_path(self.prim_path, fabric=True))
+            assert not xformable_prim.HasWorldXform(), "Fabric's world pose is set for a non-rigid prim which is unexpected. Please report this."
+            xformable_prim.SetLocalXformFromUsd()
         return
 
     def get_world_scale(self):
@@ -313,6 +305,13 @@ def get_world_scale(self):
         transform = lazy.pxr.Gf.Transform()
         transform.SetMatrix(prim_tf)
         return np.array(transform.GetScale())
+
+    @property
+    def scaled_transform(self):
+        """
+        Returns the scaled transform of this prim.
+        """
+        return PoseAPI.get_world_pose_with_scale(self._prim_path)
 
     @property
     def scale(self):
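Note (editorial illustration, not part of the patch): the rewritten XFormPrim.set_position_orientation replaces the Gf-based round trip with a single identity, local = inv(parent_world) @ desired_world, which T.mat2pose then decomposes for set_local_pose. A standalone numpy sketch of that step with hypothetical transforms:

```python
import numpy as np

def rot_z(theta):
    """4x4 homogeneous rotation about +z."""
    c, s = np.cos(theta), np.sin(theta)
    m = np.eye(4)
    m[:2, :2] = [[c, -s], [s, c]]
    return m

# Hypothetical parent world transform and desired world pose for the prim.
parent_world = rot_z(np.pi / 2)
parent_world[:3, 3] = [1.0, 0.0, 0.0]
desired_world = rot_z(np.pi / 2)
desired_world[:3, 3] = [1.0, 2.0, 0.0]

# The step the rewritten setter performs before handing off to set_local_pose.
local = np.linalg.inv(parent_world) @ desired_world

assert np.allclose(parent_world @ local, desired_world)  # composing back recovers the target
print(local[:3, 3])  # prim translation relative to its parent -> approximately [2, 0, 0]
```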
diff --git a/omnigibson/simulator.py b/omnigibson/simulator.py
index 1542bfacf..899c9d346 100644
--- a/omnigibson/simulator.py
+++ b/omnigibson/simulator.py
@@ -19,7 +19,7 @@
 from omnigibson.utils.config_utils import NumpyEncoder
 from omnigibson.utils.python_utils import clear as clear_pu, create_object_from_init_info, Serializable
 from omnigibson.utils.sim_utils import meets_minimum_isaac_version
-from omnigibson.utils.usd_utils import clear as clear_uu, FlatcacheAPI, RigidContactAPI
+from omnigibson.utils.usd_utils import clear as clear_uu, FlatcacheAPI, RigidContactAPI, PoseAPI
 from omnigibson.utils.ui_utils import (CameraMover, disclaimer, create_module_logger, suppress_omni_log, print_icon, print_logo, logo_small)
 from omnigibson.scenes import Scene
@@ -561,6 +561,11 @@ def _reset_variables(self):
         """
         Reset internal variables when a new stage is loaded
         """
+
+    def render(self):
+        super().render()
+        # During rendering, the Fabric API is updated, so we can mark it as clean
+        PoseAPI.mark_valid()
 
     def update_handles(self):
         # Handles are only relevant when physx is running
@@ -768,6 +773,7 @@ def step_physics(self):
         """
         self._physics_context._step(current_time=self.current_time)
         self._omni_update_step()
+        PoseAPI.invalidate()
 
     def _on_contact(self, contact_headers, contact_data):
         """
diff --git a/omnigibson/systems/macro_particle_system.py b/omnigibson/systems/macro_particle_system.py
index b9cce9a06..f9db90b89 100644
--- a/omnigibson/systems/macro_particle_system.py
+++ b/omnigibson/systems/macro_particle_system.py
@@ -371,7 +371,7 @@ def update(cls):
         # Run super first
         super().update()
 
-        z_extent = cls.particle_object.extent[2]
+        z_extent = cls.particle_object.aabb_extent[2]
         # Iterate over all objects, and update all particles belonging to any cloth objects
         for name, obj in cls._group_objects.items():
             group = cls.get_group_name(obj=obj)
@@ -479,7 +479,7 @@ def generate_group_particles(
 
         scales = cls.sample_scales_by_group(group=group, n=n_particles) if scales is None else scales
 
-        bbox_extents_local = [(cls.particle_object.extent * scale).tolist() for scale in scales]
+        bbox_extents_local = [(cls.particle_object.aabb_extent * scale).tolist() for scale in scales]
 
         # If we're using flatcache, we need to update the object's pose on the USD manually
         if gm.ENABLE_FLATCACHE:
@@ -535,7 +535,7 @@ def generate_group_particles_on_object(cls, group, max_samples, min_samples_for_
         # which is what we would get naively if we directly use @scales
         avg_scale = np.cbrt(np.product(obj.scale))
 
-        bbox_extents_global = scales * cls.particle_object.extent.reshape(1, 3) * avg_scale
+        bbox_extents_global = scales * cls.particle_object.aabb_extent.reshape(1, 3) * avg_scale
 
         if obj.prim_type == PrimType.CLOTH:
             # Sample locations based on randomly sampled keyfaces
diff --git a/omnigibson/systems/system_base.py b/omnigibson/systems/system_base.py
index 74be78502..9e6d01abe 100644
--- a/omnigibson/systems/system_base.py
+++ b/omnigibson/systems/system_base.py
@@ -682,7 +682,7 @@ def _compute_relative_group_scales(cls, group):
         )
 
         # Convert these into scaling factors for the x and y axes for our particle object
-        particle_bbox = cls.particle_object.extent
+        particle_bbox = cls.particle_object.aabb_extent
         minimum = np.array([bbox_lower_limit / particle_bbox[0], bbox_lower_limit / particle_bbox[1], 1.0])
         maximum = np.array([bbox_upper_limit / particle_bbox[0], bbox_upper_limit / particle_bbox[1], 1.0])
diff --git a/omnigibson/utils/usd_utils.py b/omnigibson/utils/usd_utils.py
index 03f54ece1..d0207fe3b 100644
--- a/omnigibson/utils/usd_utils.py
+++ b/omnigibson/utils/usd_utils.py
@@ -549,10 +549,57 @@ def reset(cls):
         cls.MODIFIED_PRIMS = set()
 
 
+class PoseAPI:
+    """
+    This is a singleton class for getting world poses.
+    Whenever we directly set the pose of a prim, we should call PoseAPI.invalidate().
+    After that, if we need to access the pose of a prim without stepping physics,
+    this class will refresh the poses by syncing across USD-fabric-PhysX depending on the flatcache setting.
+    """
+    VALID = False
+
+    @classmethod
+    def invalidate(cls):
+        cls.VALID = False
+
+    @classmethod
+    def mark_valid(cls):
+        cls.VALID = True
+
+    @classmethod
+    def _refresh(cls):
+        if og.sim is not None and not cls.VALID:
+            # when flatcache is on
+            if og.sim._physx_fabric_interface:
+                # no time step is taken here
+                og.sim._physx_fabric_interface.update(og.sim.get_physics_dt(), og.sim.current_time)
+            # when flatcache is off
+            else:
+                # no time step is taken here
+                og.sim.psi.fetch_results()
+            cls.mark_valid()
+
+    @classmethod
+    def get_world_pose(cls, prim_path):
+        cls._refresh()
+        position, orientation = lazy.omni.isaac.core.utils.xforms.get_world_pose(prim_path)
+        return np.array(position), np.array(orientation)[[1, 2, 3, 0]]
+
+    @classmethod
+    def get_world_pose_with_scale(cls, prim_path):
+        """
+        This is used when information about the prim's global scale is needed,
+        e.g. when converting points in the prim frame to the world frame.
+        """
+        cls._refresh()
+        return np.array(lazy.omni.isaac.core.utils.xforms._get_world_pose_transform_w_scale(prim_path)).T
+
+
 def clear():
     """
     Clear state tied to singleton classes
     """
+    PoseAPI.invalidate()
     CollisionAPI.clear()
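Note (editorial illustration, not part of the patch): PoseAPI is a plain dirty-flag cache — pose writers and physics steps call invalidate(), Simulator.render() calls mark_valid(), and readers lazily re-sync at most once before returning. A simplified, simulator-free sketch of that flow, where sync_fn stands in for the USD/Fabric/PhysX sync performed by _refresh:

```python
class LazyPoseCache:
    """Minimal dirty-flag cache mirroring PoseAPI's invalidate/mark_valid/_refresh flow."""

    def __init__(self, sync_fn):
        self._valid = False
        self._sync_fn = sync_fn  # stands in for the USD/Fabric/PhysX sync in PoseAPI._refresh

    def invalidate(self):
        self._valid = False

    def mark_valid(self):
        self._valid = True

    def get(self, read_fn):
        # Re-sync at most once per batch of reads that follows a write.
        if not self._valid:
            self._sync_fn()
            self.mark_valid()
        return read_fn()


syncs = []
cache = LazyPoseCache(sync_fn=lambda: syncs.append("synced"))
cache.invalidate()                  # e.g. after set_position_orientation / step_physics
cache.get(lambda: (0.0, 0.0, 0.0))  # first read triggers exactly one sync
cache.get(lambda: (0.0, 0.0, 0.0))  # subsequent reads are free
assert syncs == ["synced"]
```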
diff --git a/tests/test_robot_states.py b/tests/test_robot_states.py
new file mode 100644
index 000000000..e4776d03a
--- /dev/null
+++ b/tests/test_robot_states.py
@@ -0,0 +1,119 @@
+import numpy as np
+
+import omnigibson as og
+from omnigibson.macros import gm
+
+import omnigibson.lazy as lazy
+from omnigibson.sensors import VisionSensor
+from omnigibson.utils.transform_utils import pose2mat, mat2pose, relative_pose_transform
+from omnigibson.utils.usd_utils import PoseAPI
+
+def setup_environment(flatcache=True):
+    """
+    Sets up the environment with or without flatcache based on the flatcache parameter.
+    """
+    # Ensure any existing simulation is stopped
+    if og.sim is not None:
+        og.sim.stop()
+
+    # Set global flags
+    gm.ENABLE_OBJECT_STATES = True
+    gm.USE_GPU_DYNAMICS = True
+    gm.ENABLE_FLATCACHE = flatcache  # Set based on function parameter
+
+    # Define the environment configuration
+    config = {
+        "scene": {
+            "type": "Scene",
+        },
+        "robots": [
+            {
+                "type": "Fetch",
+                "obs_modalities": [],
+                "position": [150, 150, 100],
+                "orientation": [0, 0, 0, 1],
+                "controller_config": {
+                    "arm_0": {
+                        "name": "NullJointController",
+                        "motor_type": "position",
+                    },
+                },
+            }
+        ]
+    }
+
+    env = og.Environment(configs=config)
+    return env
+
+def camera_pose_test(flatcache):
+    env = setup_environment(flatcache)
+    robot = env.robots[0]
+    env.reset()
+
+    sensors = [s for s in robot.sensors.values() if isinstance(s, VisionSensor)]
+    assert len(sensors) > 0
+    vision_sensor = sensors[0]
+
+    # Get vision sensor world pose via directly calling get_position_orientation
+    robot_world_pos, robot_world_ori = robot.get_position_orientation()
+    sensor_world_pos, sensor_world_ori = vision_sensor.get_position_orientation()
+
+    robot_to_sensor_mat = pose2mat(relative_pose_transform(sensor_world_pos, sensor_world_ori, robot_world_pos, robot_world_ori))
+
+    sensor_world_pos_gt = np.array([150.16513062, 150.0, 101.3833847])
+    sensor_world_ori_gt = np.array([-0.29444987, 0.29444981, 0.64288363, -0.64288352])
+
+    assert np.allclose(sensor_world_pos, sensor_world_pos_gt)
+    assert np.allclose(sensor_world_ori, sensor_world_ori_gt)
+
+    # Now, we want to move the robot and check if the sensor pose has been updated
+    old_camera_local_pose = vision_sensor.get_local_pose()
+
+    robot.set_position_orientation(position=[100, 100, 100])
+    new_camera_local_pose = vision_sensor.get_local_pose()
+    new_camera_world_pose = vision_sensor.get_position_orientation()
+    robot_pose_mat = pose2mat(robot.get_position_orientation())
+    expected_camera_world_pos, expected_camera_world_ori = mat2pose(robot_pose_mat @ robot_to_sensor_mat)
+    assert np.allclose(old_camera_local_pose[0], new_camera_local_pose[0])
+    assert np.allclose(new_camera_world_pose[0], expected_camera_world_pos)
+    assert np.allclose(new_camera_world_pose[1], expected_camera_world_ori)
+
+    # Then, we want to move the local pose of the camera and check
+    # 1) if the world pose is updated 2) if the robot stays in the same position
+    old_camera_local_pose = vision_sensor.get_local_pose()
+    vision_sensor.set_local_pose(position=[10, 10, 10], orientation=[0, 0, 0, 1])
+    new_camera_world_pose = vision_sensor.get_position_orientation()
+    camera_parent_prim = lazy.omni.isaac.core.utils.prims.get_prim_parent(vision_sensor.prim)
+    camera_parent_path = str(camera_parent_prim.GetPath())
+    camera_parent_world_transform = PoseAPI.get_world_pose_with_scale(camera_parent_path)
+    expected_new_camera_world_pos, expected_new_camera_world_ori = mat2pose(camera_parent_world_transform @ pose2mat([[10, 10, 10], [0, 0, 0, 1]]))
+    assert np.allclose(new_camera_world_pose[0], expected_new_camera_world_pos)
+    assert np.allclose(new_camera_world_pose[1], expected_new_camera_world_ori)
+    assert np.allclose(robot.get_position(), [100, 100, 100])
+
+    # Finally, we want to move the world pose of the camera and check
+    # 1) if the local pose is updated 2) if the robot stays in the same position
+    robot.set_position_orientation(position=[150, 150, 100])
+    old_camera_local_pose = vision_sensor.get_local_pose()
+    vision_sensor.set_position_orientation([150, 150, 101.36912537], [-0.29444987, 0.29444981, 0.64288363, -0.64288352])
+    new_camera_local_pose = vision_sensor.get_local_pose()
+    assert not np.allclose(old_camera_local_pose[0], new_camera_local_pose[0])
+    assert not np.allclose(old_camera_local_pose[1], new_camera_local_pose[1])
+    assert np.allclose(robot.get_position(), [150, 150, 100])
+
+    # Another test we want to try is setting the camera's parent scale and checking if the world pose is updated
+    camera_parent_prim.GetAttribute('xformOp:scale').Set(lazy.pxr.Gf.Vec3d([2.0, 2.0, 2.0]))
+    camera_parent_world_transform = PoseAPI.get_world_pose_with_scale(camera_parent_path)
+    camera_local_pose = vision_sensor.get_local_pose()
+    expected_new_camera_world_pos, _ = mat2pose(camera_parent_world_transform @ pose2mat(camera_local_pose))
+    new_camera_world_pose = vision_sensor.get_position_orientation()
+    assert np.allclose(new_camera_world_pose[0], expected_new_camera_world_pos)
+
+    og.sim.clear()
+
+def test_camera_pose_flatcache_on():
+    camera_pose_test(True)
+
+def test_camera_pose_flatcache_off():
+    camera_pose_test(False)
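Note (editorial illustration, not part of the patch): the final assertion in camera_pose_test exercises scale propagation — doubling the parent's xformOp:scale should double the camera's world-frame offset while its local pose stays the same. The same check in isolation, with plain 4x4 matrices and made-up numbers in place of PoseAPI.get_world_pose_with_scale / pose2mat:

```python
import numpy as np

# Camera fixed at local offset [10, 10, 10] relative to its parent.
camera_local = np.eye(4)
camera_local[:3, 3] = [10.0, 10.0, 10.0]

parent_world = np.eye(4)                              # parent at origin, unit scale
parent_world_scaled = np.diag([2.0, 2.0, 2.0, 1.0])  # after xformOp:scale = (2, 2, 2)

print((parent_world @ camera_local)[:3, 3])         # -> [10. 10. 10.]
print((parent_world_scaled @ camera_local)[:3, 3])  # -> [20. 20. 20.]
```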