diff --git a/html/.buildinfo b/html/.buildinfo
new file mode 100644
index 0000000..9e4a001
--- /dev/null
+++ b/html/.buildinfo
@@ -0,0 +1,4 @@
+# Sphinx build info version 1
+# This file hashes the configuration used when building these files. When it is not found, a full rebuild will be done.
+config: 4d636bbedac67a11c4b09a71c19549c1
+tags: 645f666f9bcd5a90fca523b33c5a78b7
diff --git a/html/_images/before_rename_urdf_img.png b/html/_images/before_rename_urdf_img.png
new file mode 100644
index 0000000..9220ca0
Binary files /dev/null and b/html/_images/before_rename_urdf_img.png differ
diff --git a/html/_images/follow_target_mycobot_demo.png b/html/_images/follow_target_mycobot_demo.png
new file mode 100644
index 0000000..7ea5f33
Binary files /dev/null and b/html/_images/follow_target_mycobot_demo.png differ
diff --git a/html/_images/lula_test_extension.png b/html/_images/lula_test_extension.png
new file mode 100644
index 0000000..b515612
Binary files /dev/null and b/html/_images/lula_test_extension.png differ
diff --git a/html/_images/robot_model_class.png b/html/_images/robot_model_class.png
new file mode 100644
index 0000000..9a17316
Binary files /dev/null and b/html/_images/robot_model_class.png differ
diff --git a/html/_images/robot_model_controller.png b/html/_images/robot_model_controller.png
new file mode 100644
index 0000000..3552d8b
Binary files /dev/null and b/html/_images/robot_model_controller.png differ
diff --git a/html/_images/robot_model_yml.png b/html/_images/robot_model_yml.png
new file mode 100644
index 0000000..3fd3899
Binary files /dev/null and b/html/_images/robot_model_yml.png differ
diff --git a/html/_images/robot_models_add_controller.png b/html/_images/robot_models_add_controller.png
new file mode 100644
index 0000000..a7e3155
Binary files /dev/null and b/html/_images/robot_models_add_controller.png differ
diff --git a/html/_modules/grutopia/core/datahub/api.html b/html/_modules/grutopia/core/datahub/api.html
new file mode 100644
index 0000000..7eb805d
--- /dev/null
+++ b/html/_modules/grutopia/core/datahub/api.html
@@ -0,0 +1,438 @@
+from typing import Any, Dict, List, Optional
+
+from pydantic import BaseModel
+
+
+class MetaActionData(BaseModel):
+    """
+    action status in grutopia
+    """
+    controller: str
+    data: Any
+
+
+class ActionData(BaseModel):
+    """
+    action status in grutopia
+    """
+    robot: str
+    controllers: List[MetaActionData]
+
+
+class _IsaacData(BaseModel):
+    """
+    isaac status in grutopia
+    """
+    actions: Optional[List[Dict[str, Any]]]
+    obs: Optional[List[Dict[str, Any]]]
+
+
+
+class IsaacData:
+    """
+    isaac status in grutopia
+
+    There are two types of isaac status:
+
+    * Action
+    * Observation
+
+    structure of isaac status like this::
+
+        {
+            actions: [
+                {
+                    robot_1: {
+                        cap: param,
+                    }
+                }
+            ],
+            observations: [
+                {
+                    robot_1: {
+                        obs_1: data,
+                        obs_2: data
+                    }
+                }
+            ]
+        }
+
+    """
+    data = _IsaacData(actions=[], obs=[])
+
+    def __init__(self) -> None:
+        pass
+
+    @classmethod
+    def get_all(cls) -> _IsaacData:
+        return cls.data
+
+    # Observation
+    @classmethod
+    def set_obs_data(cls, obs: List[Dict[str, Any]]) -> None:
+        cls.data.obs = obs
+
+    @classmethod
+    def get_obs(cls) -> List[Dict[str, Any]]:
+        """
+        Get isaac observation data
+
+        Returns:
+            isaac observation data list
+        """
+        return cls.data.obs
+
+    @classmethod
+    def get_obs_by_id(cls, task_id: int) -> Dict[str, Any]:
+        """
+        Get isaac observation by id
+
+        Args:
+            task_id: isaac task id
+
+        Returns:
+            isaac observation data
+        """
+        return cls.data.obs[task_id]
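+
+# A minimal usage sketch (illustrative only, not part of the module): IsaacData
+# keeps per-task observations in a class-level buffer, so writers and readers
+# share state without passing instances around.
+#
+#     IsaacData.set_obs_data([{'robot_1': {'joint_positions': [0.0, 0.1]}}])
+#     obs_for_task_0 = IsaacData.get_obs_by_id(0)  # -> {'robot_1': {...}}
+
+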
+"""
+Includes web api endpoints
+"""
+fromtypingimportAny,Dict,List
+
+importhttpx
+
+fromgrutopia.core.datahub.isaac_dataimportActionData
+
+# constants
+WebBEUrl='http://127.0.0.1:9000'# TODO config this
+GetAllObsPath=WebBEUrl+'/api/stream/get_all_obs'
+GetObsByIdPath=WebBEUrl+'/api/stream/get_obs_by_id/'
+FlushObsUrl=WebBEUrl+'/api/isaac/flush_obs_data'
+SetActionsUrl=WebBEUrl+'/api/isaac/set_action'
+GetAllActionUrl=WebBEUrl+'/api/isaac/get_actions'
+GetActionByIdUrl=WebBEUrl+'/api/isaac/get_action_by_id/'
+
+
+
[docs]defget_all_obs()->List[Dict[str,Any]]|None:
+"""
+ Get all observation data
+ Returns:
+ obs (List[Dict[str, Any]]): List of all observation data
+ """
+ r=httpx.get(GetAllObsPath)
+ ifr.status_code==200:
+ returnr.json()
+ returnNone
+
+
+
[docs]defget_obs_by_id(task_id:int)->Any|None:
+"""
+ Get observation by id
+ Args:
+ task_id (int): id of observation data
+
+ Returns:
+ obs (Any): Observation data
+ """
+ r=httpx.get(GetObsByIdPath+str(task_id))
+ ifr.status_code==200:
+ returnr.json()
+
+
+
[docs]defset_obs_data(obs:List[Dict[str,Any]])->bool:
+"""
+ Set observation data web API
+ Args:
+ obs (List[Dict[str, Any]]): isaac observation data
+
+ Returns:
+ OK if set successfully
+ """
+ r=httpx.post(FlushObsUrl,json=obs,timeout=1)
+ ifr.status_code==200andr.json()['msg']=='OK':
+ returnTrue
+ returnFalse
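+
+# A minimal usage sketch (illustrative, assuming the datahub backend is
+# reachable at WebBEUrl): push a batch of observations, then read them back.
+#
+#     ok = set_obs_data([{'robot_1': {'pose': [0, 0, 0]}}])
+#     if ok:
+#         all_obs = get_all_obs()
+#         first = get_obs_by_id(0)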
+
+
+def send_chain_of_thought(cot: str, uuid: str = 'none') -> None:
+    """
+    chain of thought data
+
+    Args:
+        uuid (str): uuid of chain of thought data, defaults to "none".
+        cot (str): chain of thought data.
+    """
+
+    def cot_format(x):
+        return {'type': 'text', 'value': x}
+
+    res_data = [{'type': 'time', 'value': datetime.datetime.now().strftime('%H:%M')}]
+    for i in cot:
+        res_data.append(cot_format(i))
+    AsyncRequest.post(uuid, SendCOTUrl, res_data)
+
+
+def send_chat_control(nickname: str, text: str, img: str = None, role: str = 'user', uuid: str = 'none') -> None:
+    """Send a new message to the chatbox.
+
+    Args:
+        nickname (str): nickname displayed in the chatbox.
+        text (str): text to send to the chatbox.
+        img (str, optional): image to send to the chatbox. Defaults to None.
+        role (str, optional): role name, user or agent. Defaults to "user".
+        uuid (str, optional): uuid of the message. Defaults to 'none'.
+    """
+    avatar_url = AvatarUrls.get(role, DefaultAvatarUrl)
+    res_data = {
+        'type': role,
+        'name': nickname,
+        'time': datetime.datetime.now().strftime('%H:%M'),
+        'message': text,
+        'photo': avatar_url,
+        'img': img,
+    }
+    AsyncRequest.post(uuid, SendChatControlUrl, res_data)
+def step(self, actions: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
+    """
+    run step with given action (with isaac step)
+
+    Args:
+        actions (List[Dict[str, Any]]): action (with isaac step)
+
+    Returns:
+        List[Dict[str, Any]]: observations (with isaac step)
+    """
+    if len(actions) != len(self.config.tasks):
+        raise AssertionError('len of action list is not equal to len of task list')
+    _actions = []
+    for action_idx, action in enumerate(actions):
+        _action = {}
+        for k, v in action.items():
+            _action[f'{k}_{action_idx}'] = v
+        _actions.append(_action)
+    action_after_reshape = {
+        self.config.tasks[action_idx].name: action
+        for action_idx, action in enumerate(_actions)
+    }
+
+    # log.debug(action_after_reshape)
+    self._runner.step(action_after_reshape)
+    observations = self.get_observations()
+    return observations
+
+
+def reset(self, envs: List[int] = None):
+    """
+    reset the environment (use isaac world reset)
+
+    Args:
+        envs (List[int]): envs to be reset (defaults to resetting all envs)
+    """
+    if envs is not None:
+        if len(envs) == 0:
+            return
+        log.debug(f'============= reset: {envs} ==============')
+        # int -> name
+        self._runner.reset([self.config.tasks[e].name for e in envs])
+        return self.get_observations(), {}
+    self._runner.reset()
+    return self.get_observations(), {}
+
+
+def get_observations(self) -> List[Dict[str, Any]]:
+    """
+    Get observations from Isaac environment
+
+    Returns:
+        List[Dict[str, Any]]: observations
+    """
+    _obs = self._runner.get_obs()
+    return _obs
+
+def render(self, mode='human'):
+    return
+
+
+def close(self):
+    """close the environment"""
+    self._simulation_app.close()
+    return
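+
+# A minimal driving-loop sketch (illustrative; `Env` stands in for the
+# enclosing environment class these methods belong to, and the config and
+# action names are assumptions):
+#
+#     env = Env(config)
+#     obs, _ = env.reset()
+#     for _ in range(100):
+#         actions = [{'some_controller': {...}} for _ in env.config.tasks]
+#         obs = env.step(actions)
+#     env.close()
+
+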
+def get_obs(self) -> dict:
+    """Get observation of robot, including controllers, sensors, and world pose.
+
+    Raises:
+        NotImplementedError: must be implemented by subclasses.
+    """
+    raise NotImplementedError()
+
+
+def get_robot_ik_base(self) -> RigidPrim:
+    """Get base link of ik controlled parts.
+
+    Returns:
+        RigidPrim: rigid prim of ik base link.
+    """
+    raise NotImplementedError()
+
+
+def get_robot_base(self) -> RigidPrim:
+    """
+    Get base link of robot.
+
+    Returns:
+        RigidPrim: rigid prim of robot base link.
+    """
+    raise NotImplementedError()
+
+
+def get_robot_scale(self) -> np.ndarray:
+    """Get robot scale.
+
+    Returns:
+        np.ndarray: robot scale in (x, y, z).
+    """
+    return self.isaac_robot.get_local_scale()
+class BaseSensor(ABC):
+    """Base class of sensor."""
+    sensors = {}
+
+    def __init__(self, config: SensorModel, robot: BaseRobot, scene: Scene):
+        """Initialize the sensor.
+
+        Args:
+            config (SensorModel): merged config (from user config and robot model) of the sensor.
+            robot (BaseRobot): robot owning the sensor.
+            scene (Scene): scene from isaac sim.
+        """
+        if config.name is None:
+            raise ValueError('must specify sensor name.')
+        self.name = config.name
+        self.config = config
+        self._scene = scene
+        self._robot = robot
+
+    @abstractmethod
+    def sensor_init(self):
+        raise NotImplementedError()
+
+    @abstractmethod
+    def get_data(self) -> Dict:
+        """Get data from sensor.
+
+        Returns:
+            Dict: data dict of sensor.
+        """
+        raise NotImplementedError()
+
+    @classmethod
+    def register(cls, name: str):
+        """
+        Register a sensor class with the given name (decorator).
+
+        Args:
+            name (str): name of the sensor class.
+        """
+
+        def decorator(sensor_class):
+            cls.sensors[name] = sensor_class
+
+            @wraps(sensor_class)
+            def wrapped_function(*args, **kwargs):
+                return sensor_class(*args, **kwargs)
+
+            return wrapped_function
+
+        return decorator
+
+
+def config_inject(params: SensorParams, model: SensorModel) -> SensorModel:
+    """Merge sensor config from user config and robot model.
+
+    Args:
+        params (SensorParams): user config.
+        model (SensorModel): sensor config from robot model.
+
+    Returns:
+        SensorModel: merged sensor config.
+    """
+    if params is None:
+        return model
+    config = model.dict()
+    user = params.dict()
+    for k, v in user.items():
+        if v is not None:
+            config[k] = v
+    conf = SensorModel(**config)
+
+    return conf
+
+
+def create_sensors(config: RobotUserConfig, robot_model: RobotModel, robot: BaseRobot,
+                   scene: Scene) -> Dict[str, BaseSensor]:
+    """Create all sensors of one robot.
+
+    Args:
+        config (RobotUserConfig): user config of the robot.
+        robot_model (RobotModel): model of the robot.
+        robot (BaseRobot): robot instance.
+        scene (Scene): scene from isaac sim.
+
+    Returns:
+        Dict[str, BaseSensor]: dict of sensors with sensor name as key.
+    """
+    sensor_map = {}
+    if robot_model.sensors is not None:
+        available_sensors = {a.name: a for a in robot_model.sensors}
+        for sensor_name, sensor in available_sensors.items():
+            if sensor.type not in BaseSensor.sensors:
+                raise KeyError(f'unknown sensor type "{sensor.type}"')
+            sensor_cls = BaseSensor.sensors[sensor.type]
+            # Find if user param exists for this sensor.
+            param = None
+            if config.sensor_params is not None:
+                for p in config.sensor_params:
+                    if p.name == sensor_name:
+                        param = p
+                        break
+
+            sensor_ins = sensor_cls(config=config_inject(param, sensor), robot=robot, name=sensor_name, scene=scene)
+            sensor_map[sensor_name] = sensor_ins
+            sensor_ins.sensor_init()
+            log.debug(f'==================== {sensor_name} loaded ==========================')
+
+    return sensor_map
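+
+# A minimal registration sketch (illustrative; the sensor name and returned
+# data are assumptions): subclasses register themselves under a string type,
+# and create_sensors looks that type up in BaseSensor.sensors.
+#
+#     @BaseSensor.register('DemoCamera')
+#     class DemoCamera(BaseSensor):
+#         def sensor_init(self):
+#             pass
+#
+#         def get_data(self) -> Dict:
+#             return {'rgba': None}
+
+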
+def add_usd_ref(source_stage: Usd.Stage, dest_stage: Usd.Stage, src_prim_path: str, dest_prim_path: str) -> None:
+    """
+    Add an opened usd into another usd as a reference,
+    set name in dest_prim_path
+
+    Args:
+        source_stage (Usd.Stage): source stage
+        dest_stage (Usd.Stage): dest stage
+        src_prim_path (str): source prim path
+        dest_prim_path (str): dest prim path
+    """
+    src_root_layer = source_stage.GetRootLayer()
+    log.debug(src_root_layer.identifier)
+    source_prim = source_stage.GetPrimAtPath(src_prim_path)
+    dest_prim = dest_stage.DefinePrim(dest_prim_path, source_prim.GetTypeName())
+    dest_prim.GetReferences().AddReference(src_root_layer.identifier)
+    dest_stage.GetRootLayer().Save()
+
+
+def get_local_transform_xform(prim: Usd.Prim) -> typing.Tuple[Gf.Vec3d, Gf.Rotation, Gf.Vec3d]:
+    """
+    Get the local transformation of a prim using Xformable.
+
+    Args:
+        prim: The prim to calculate the local transformation.
+
+    Returns:
+        A tuple of:
+        - Translation vector.
+        - Rotation quaternion, i.e. 3d vector plus angle.
+        - Scale vector.
+    """
+    xform = UsdGeom.Xformable(prim)
+    local_transformation: Gf.Matrix4d = xform.GetLocalTransformation()
+    translation: Gf.Vec3d = local_transformation.ExtractTranslation()
+    rotation: Gf.Rotation = local_transformation.ExtractRotation()
+    scale: Gf.Vec3d = Gf.Vec3d(*(v.GetLength() for v in local_transformation.ExtractRotationMatrix()))
+    return translation, rotation, scale
+
+
+def get_world_transform_xform(prim: Usd.Prim) -> typing.Tuple[Gf.Vec3d, Gf.Rotation, Gf.Vec3d]:
+    """
+    Get the world transformation of a prim using Xformable.
+
+    Args:
+        prim: The prim to calculate the world transformation.
+
+    Returns:
+        A tuple of:
+        - Translation vector.
+        - Rotation quaternion, i.e. 3d vector plus angle.
+        - Scale vector.
+    """
+    xform = UsdGeom.Xformable(prim)
+    time = Usd.TimeCode.Default()
+    world_transform: Gf.Matrix4d = xform.ComputeLocalToWorldTransform(time)
+    translation: Gf.Vec3d = world_transform.ExtractTranslation()
+    rotation: Gf.Rotation = world_transform.ExtractRotation()
+    scale: Gf.Vec3d = Gf.Vec3d(*(v.GetLength() for v in world_transform.ExtractRotationMatrix()))
+    return translation, rotation, scale
+
+
+def create_new_usd(new_usd_path: str, default_prim_name: str, default_axis: str = None) -> Usd.Stage:
+    """
+    Create a new usd
+
+    Args:
+        new_usd_path (str): where to place this new usd
+        default_prim_name (str): default prim name (root prim path)
+        default_axis (str): default axis for new usd
+    """
+    stage: Usd.Stage = Usd.Stage.CreateNew(new_usd_path)
+    default_prim: Usd.Prim = UsdGeom.Xform.Define(stage, Sdf.Path('/' + default_prim_name)).GetPrim()
+    _set_default_prim(stage, default_prim)
+    _set_up_axis(stage, default_axis)
+    stage.GetRootLayer().Save()
+    return stage
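+
+
+# A minimal composition sketch (illustrative; the paths are assumptions):
+# create a fresh stage, then reference a prim from another opened stage into it.
+#
+#     new_stage = create_new_usd('/tmp/scene.usd', 'World', 'z')
+#     src_stage = Usd.Stage.Open('/tmp/robot.usd')
+#     add_usd_ref(src_stage, new_stage, '/robot', '/World/robot')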
+
+
+def _set_up_axis(stage: Usd.Stage, axis_str: str = None) -> None:
+    """
+    Set default axis for a stage
+
+    Args:
+        stage (Usd.Stage): objects stage
+        axis_str (str, optional): axis str, 'y' or 'z', set 'z' if None. Defaults to None.
+    """
+    if axis_str == 'y' or axis_str == 'Y':
+        axis: UsdGeom.Tokens = UsdGeom.Tokens.y
+    else:
+        axis: UsdGeom.Tokens = UsdGeom.Tokens.z
+    UsdGeom.SetStageUpAxis(stage, axis)
+
+
+def _set_default_prim(stage: Usd.Stage, prim: Usd.Prim) -> None:
+    """
+    Set default prim for a stage
+
+    Args:
+        stage (Usd.Stage): objects stage
+        prim (Usd.Prim): prim in this stage
+    """
+    stage.SetDefaultPrim(prim)
+
+
+def compute_bbox(prim: Usd.Prim) -> Gf.Range3d:
+    """
+    Compute Bounding Box using ComputeWorldBound at UsdGeom.Imageable
+
+    Args:
+        prim: A prim to compute the bounding box.
+
+    Returns:
+        A range (i.e. bounding box)
+    """
+    imageable: UsdGeom.Imageable = UsdGeom.Imageable(prim)
+    time = Usd.TimeCode.Default()
+    bound = imageable.ComputeWorldBound(time, UsdGeom.Tokens.default_)
+    bound_range = bound.ComputeAlignedBox()
+    return bound_range
+
+
+def delete_prim_in_stage(stage: Usd.Stage, prim: Usd.Prim) -> None:
+    """
+    Delete a prim in stage
+
+    Args:
+        stage (Usd.Stage): objects stage
+        prim (Usd.Prim): prim to be deleted
+    """
+    stage.RemovePrim(prim.GetPrimPath())
+
+
+def set_xform_of_prim(prim: Usd.Prim, xform_op: str, set_valve: typing.Any) -> None:
+    """
+    Set xform data of a prim with new data
+
+    Args:
+        prim (Usd.Prim): objects prim
+        xform_op (str): which op to be set
+        set_valve (typing.Any): new data to be set, could be np.array
+    """
+    stage = prim.GetStage()
+    op_list = prim.GetAttribute('xformOpOrder').Get()
+    s = None
+    for i in op_list:
+        if xform_op == i:
+            log.debug(prim.GetAttribute(i))
+            s = prim.GetAttribute(i)
+    trans = s.Get()
+    trans_value = set_valve
+    data_class = type(trans)
+    time_code = Usd.TimeCode.Default()
+    new_data = data_class(*trans_value)
+    s.Set(new_data, time_code)
+    stage.Save()
+
+
+def delete_xform_of_prim(prim: Usd.Prim, xform_op: str) -> None:
+    """
+    Delete xform data of a prim
+
+    Args:
+        prim (Usd.Prim): objects prim
+        xform_op (str): which op to be deleted
+    """
+    stage = prim.GetStage()
+    if prim.HasAttribute(xform_op):
+        # Clear the attribute from the Prim
+        prim.GetAttribute(xform_op).Clear()
+    stage.Save()
+
+
+def add_xform_of_prim(prim: Usd.Prim, xform_op: str, set_valve: typing.Any) -> None:
+    """
+    Add xform data of a prim with new data
+
+    Args:
+        prim (Usd.Prim): objects prim
+        xform_op (str): which op to be set
+        set_valve (typing.Any): new data to be set, could be Gf.Vec3d, Gf.Rotation
+    """
+    stage = prim.GetStage()
+    attribute_name = xform_op
+    attribute_value = set_valve
+    opType = get_xformop_type(xform_op)
+    precision = get_xformop_precision('float')
+    attribute = UsdGeom.Xformable(prim).AddXformOp(opType, precision)
+    if attribute:
+        attribute.Set(attribute_value)
+        # log.debug(f"Attribute {attribute_name} has been set to {attribute_value}.")
+    else:
+        log.debug(f'Failed to create attribute named {attribute_name}.')
+    stage.Save()
+
+
+def add_xform_of_prim_old(prim: Usd.Prim, xform_op: str, set_valve: typing.Any) -> None:
+    """
+    Add xform data of a prim with new data
+
+    Args:
+        prim (Usd.Prim): objects prim
+        xform_op (str): which op to be set
+        set_valve (typing.Any): new data to be set, could be Gf.Vec3d, Gf.Rotation
+    """
+    stage = prim.GetStage()
+    attribute_name = xform_op
+    attribute_value = set_valve
+    if '3' in type(set_valve).__name__:
+        attribute_type = Sdf.ValueTypeNames.Float3
+    else:
+        attribute_type = Sdf.ValueTypeNames.Float
+    attribute = prim.CreateAttribute(attribute_name, attribute_type)
+    if attribute:
+        attribute.Set(attribute_value)
+        # log.debug(f"Attribute {attribute_name} has been set to {attribute_value}.")
+    else:
+        log.debug(f'Failed to create attribute named {attribute_name}.')
+    stage.Save()
+@abstractmethod
+def is_done(self) -> bool:
+    """
+    Returns True if the task is done.
+
+    Raises:
+        NotImplementedError: this must be overridden.
+    """
+    raise NotImplementedError
+
+
+def individual_reset(self):
+    """
+    reload this task individually without reloading whole world.
+    """
+    raise NotImplementedError
+
+
+def post_reset(self) -> None:
+    """Calls while doing a .reset() on the world."""
+    self.steps = 0
+    for robot in self.robots.values():
+        robot.post_reset()
+    return
+
+
+@classmethod
+def register(cls, name: str):
+    """
+    Register a task with its name (decorator).
+
+    Args:
+        name (str): name of the task
+    """
+
+    def decorator(tasks_class):
+        cls.tasks[name] = tasks_class
+
+        @wraps(tasks_class)
+        def wrapped_function(*args, **kwargs):
+            return tasks_class(*args, **kwargs)
+
+        return wrapped_function
+
+    return decorator
+# Copyright (c) 2022-2024, The ORBIT Project Developers.
+# All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+"""Sub-module containing utilities for working with different array backends."""
+
+from typing import Union
+
+import numpy as np
+import torch
+import warp as wp
+
+TensorData = Union[np.ndarray, torch.Tensor, wp.array]
+"""Type definition for a tensor data.
+
+Union of numpy, torch, and warp arrays.
+"""
+
+TENSOR_TYPES = {
+    'numpy': np.ndarray,
+    'torch': torch.Tensor,
+    'warp': wp.array,
+}
+"""A dictionary containing the types for each backend.
+
+The keys are the name of the backend ("numpy", "torch", "warp") and the values are the corresponding type
+(``np.ndarray``, ``torch.Tensor``, ``wp.array``).
+"""
+
+TENSOR_TYPE_CONVERSIONS = {
+    'numpy': {
+        wp.array: lambda x: x.numpy(),
+        torch.Tensor: lambda x: x.detach().cpu().numpy()
+    },
+    'torch': {
+        wp.array: lambda x: wp.torch.to_torch(x),
+        np.ndarray: lambda x: torch.from_numpy(x)
+    },
+    'warp': {
+        # note: the key must be the type np.ndarray (not the np.array function),
+        # since conversions are looked up by type(value)
+        np.ndarray: lambda x: wp.array(x),
+        torch.Tensor: lambda x: wp.torch.from_torch(x)
+    },
+}
+"""A nested dictionary containing the conversion functions for each backend.
+
+The keys of the outer dictionary are the name of target backend ("numpy", "torch", "warp"). The keys of the
+inner dictionary are the source backend (``np.ndarray``, ``torch.Tensor``, ``wp.array``).
+"""
+
+
+
+def convert_to_torch(
+    array: TensorData,
+    dtype: torch.dtype = None,
+    device: torch.device | str | None = None,
+) -> torch.Tensor:
+    """Converts a given array into a torch tensor.
+
+    The function tries to convert the array to a torch tensor. If the array is a numpy/warp array, or a python
+    list/tuple, it is converted to a torch tensor. If the array is already a torch tensor, it is returned
+    directly.
+
+    If ``device`` is None, then the function deduces the current device of the data. For numpy arrays,
+    this defaults to "cpu", for torch tensors it is "cpu" or "cuda", and for warp arrays it is "cuda".
+
+    Note:
+        Since PyTorch does not support unsigned integer types, unsigned integer arrays are converted to
+        signed integer arrays. This is done by casting the array to the corresponding signed integer type.
+
+    Args:
+        array: The input array. It can be a numpy array, warp array, python list/tuple, or torch tensor.
+        dtype: Target data-type for the tensor.
+        device: The target device for the tensor. Defaults to None.
+
+    Returns:
+        The converted array as torch tensor.
+    """
+    # Convert array to tensor
+    # if the datatype is not currently supported by torch we need to improvise
+    # supported types are: https://pytorch.org/docs/stable/tensors.html
+    if isinstance(array, torch.Tensor):
+        tensor = array
+    elif isinstance(array, np.ndarray):
+        if array.dtype == np.uint32:
+            array = array.astype(np.int32)
+        # need to deal with object arrays (np.void) separately
+        tensor = torch.from_numpy(array)
+    elif isinstance(array, wp.array):
+        if array.dtype == wp.uint32:
+            array = array.view(wp.int32)
+        tensor = wp.to_torch(array)
+    else:
+        tensor = torch.Tensor(array)
+    # Convert tensor to the right device
+    if device is not None and str(tensor.device) != str(device):
+        tensor = tensor.to(device)
+    # Convert dtype of tensor if requested
+    if dtype is not None and tensor.dtype != dtype:
+        tensor = tensor.type(dtype)
+
+    return tensor
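+
+# A minimal usage sketch (illustrative):
+#
+#     t = convert_to_torch(np.zeros((2, 3), dtype=np.uint32), dtype=torch.float32, device='cpu')
+#     # uint32 is first cast to int32 (torch has no uint32), then moved/cast as requested
+
+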
+# Copyright (c) 2022-2024, The ORBIT Project Developers.
+# All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+"""Sub-module that provides a wrapper around the Python 3.7 onwards ``dataclasses`` module."""
+
+import inspect
+from collections.abc import Callable
+from copy import deepcopy
+from dataclasses import MISSING, Field, dataclass, field, replace
+from typing import Any, ClassVar
+
+from .dict import class_to_dict, update_class_from_dict
+
+_CONFIGCLASS_METHODS = ['to_dict', 'from_dict', 'replace', 'copy']
+"""List of class methods added at runtime to dataclass."""
+
+"""
+Wrapper around dataclass.
+"""
+
+
+def __dataclass_transform__():
+    """Add annotations decorator for PyLance."""
+    return lambda a: a
+
+
+@__dataclass_transform__()
+def configclass(cls, **kwargs):
+    """Wrapper around `dataclass` functionality to add extra checks and utilities.
+
+    As of Python 3.7, the standard dataclasses have two main issues which make them non-generic for
+    configuration use-cases. These include:
+
+    1. Requiring a type annotation for all its members.
+    2. Requiring explicit usage of :meth:`field(default_factory=...)` to reinitialize mutable variables.
+
+    This function provides a decorator that wraps around Python's `dataclass`_ utility to deal with
+    the above two issues. It also provides additional helper functions for dictionary <-> class
+    conversion and easily copying class instances.
+
+    Usage:
+
+    .. code-block:: python
+
+        from dataclasses import MISSING
+
+        from omni.isaac.orbit.utils.configclass import configclass
+
+
+        @configclass
+        class ViewerCfg:
+            eye: list = [7.5, 7.5, 7.5]  # field missing on purpose
+            lookat: list = field(default_factory=lambda: [0.0, 0.0, 0.0])
+
+
+        @configclass
+        class EnvCfg:
+            num_envs: int = MISSING
+            episode_length: int = 2000
+            viewer: ViewerCfg = ViewerCfg()
+
+        # create configuration instance
+        env_cfg = EnvCfg(num_envs=24)
+
+        # print information as a dictionary
+        print(env_cfg.to_dict())
+
+        # create a copy of the configuration
+        env_cfg_copy = env_cfg.copy()
+
+        # replace arbitrary fields using keyword arguments
+        env_cfg_copy = env_cfg_copy.replace(num_envs=32)
+
+    Args:
+        cls: The class to wrap around.
+        **kwargs: Additional arguments to pass to :func:`dataclass`.
+
+    Returns:
+        The wrapped class.
+
+    .. _dataclass: https://docs.python.org/3/library/dataclasses.html
+    """
+    # add type annotations
+    _add_annotation_types(cls)
+    # add field factory
+    _process_mutable_types(cls)
+    # copy mutable members
+    # note: we check if user defined __post_init__ function exists and augment it with our own
+    if hasattr(cls, '__post_init__'):
+        setattr(cls, '__post_init__', _combined_function(cls.__post_init__, _custom_post_init))
+    else:
+        setattr(cls, '__post_init__', _custom_post_init)
+    # add helper functions for dictionary conversion
+    setattr(cls, 'to_dict', _class_to_dict)
+    setattr(cls, 'from_dict', _update_class_from_dict)
+    setattr(cls, 'replace', _replace_class_with_kwargs)
+    setattr(cls, 'copy', _copy_class)
+    # wrap around dataclass
+    cls = dataclass(cls, **kwargs)
+    # return wrapped class
+    return cls
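+
+# A minimal sketch of the dict round-trip configclass adds (names follow the
+# docstring's own EnvCfg example; the override values are assumptions):
+#
+#     env_cfg = EnvCfg(num_envs=24)
+#     as_dict = env_cfg.to_dict()                   # nested dict of all fields
+#     env_cfg.from_dict({'episode_length': 1000})   # in-place update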
+
+
+"""
+Dictionary <-> Class operations.
+
+These are redefined here to add new docstrings.
+"""
+
+
+def_class_to_dict(obj:object)->dict[str,Any]:
+"""Convert an object into dictionary recursively.
+
+ Returns:
+ Converted dictionary mapping.
+ """
+ returnclass_to_dict(obj)
+
+
+def_update_class_from_dict(obj,data:dict[str,Any])->None:
+"""Reads a dictionary and sets object variables recursively.
+
+ This function performs in-place update of the class member attributes.
+
+ Args:
+ data: Input (nested) dictionary to update from.
+
+ Raises:
+ TypeError: When input is not a dictionary.
+ ValueError: When dictionary has a value that does not match default config type.
+ KeyError: When dictionary has a key that does not exist in the default config type.
+ """
+ returnupdate_class_from_dict(obj,data,_ns='')
+
+
+def_replace_class_with_kwargs(obj:object,**kwargs)->object:
+"""Return a new object replacing specified fields with new values.
+
+ This is especially useful for frozen classes. Example usage:
+
+ .. code-block:: python
+
+ @configclass(frozen=True)
+ class C:
+ x: int
+ y: int
+
+ c = C(1, 2)
+ c1 = c.replace(x=3)
+ assert c1.x == 3 and c1.y == 2
+
+ Args:
+ obj: The object to replace.
+ **kwargs: The fields to replace and their new values.
+
+ Returns:
+ The new object.
+ """
+ returnreplace(obj,**kwargs)
+
+
+def_copy_class(obj:object)->object:
+"""Return a new object with the same fields as the original."""
+ returnreplace(obj)
+
+
+"""
+Private helper functions.
+"""
+
+
+def_add_annotation_types(cls):
+"""Add annotations to all elements in the dataclass.
+
+ By definition in Python, a field is defined as a class variable that has a type annotation.
+
+ In case type annotations are not provided, dataclass ignores those members when :func:`__dict__()` is called.
+ This function adds these annotations to the class variable to prevent any issues in case the user forgets to
+ specify the type annotation.
+
+ This makes the following a feasible operation:
+
+ @dataclass
+ class State:
+ pos = (0.0, 0.0, 0.0)
+ ^^
+ If the function is NOT used, the following type-error is returned:
+ TypeError: 'pos' is a field but has no type annotation
+ """
+ # get type hints
+ hints={}
+ # iterate over class inheritance
+ # we add annotations from base classes first
+ forbaseinreversed(cls.__mro__):
+ # check if base is object
+ ifbaseisobject:
+ continue
+ # get base class annotations
+ ann=base.__dict__.get('__annotations__',{})
+ # directly add all annotations from base class
+ hints.update(ann)
+ # iterate over base class members
+ # Note: Do not change this to dir(base) since it orders the members alphabetically.
+ # This is not desirable since the order of the members is important in some cases.
+ forkeyinbase.__dict__:
+ # get class member
+ value=getattr(base,key)
+ # skip members
+ if_skippable_class_member(key,value,hints):
+ continue
+ # add type annotations for members that don't have explicit type annotations
+ # for these, we deduce the type from the default value
+ ifnotisinstance(value,type):
+ ifkeynotinhints:
+ # check if var type is not MISSING
+ # we cannot deduce type from MISSING!
+ ifvalueisMISSING:
+ raiseTypeError(f"Missing type annotation for '{key}' in class '{cls.__name__}'."
+ ' Please add a type annotation or set a default value.')
+ # add type annotation
+ hints[key]=type(value)
+ elifkey!=value.__name__:
+ # note: we don't want to add type annotations for nested configclass. Thus, we check if
+ # the name of the type matches the name of the variable.
+ # since Python 3.10, type hints are stored as strings
+ hints[key]=f'type[{value.__name__}]'
+
+ # Note: Do not change this line. `cls.__dict__.get("__annotations__", {})` is different from
+ # `cls.__annotations__` because of inheritance.
+ cls.__annotations__=cls.__dict__.get('__annotations__',{})
+ cls.__annotations__=hints
+
+
+def _process_mutable_types(cls):
+    """Initialize all mutable elements through :obj:`dataclasses.Field` to avoid unnecessary complaints.
+
+    By default, dataclass requires usage of :obj:`field(default_factory=...)` to reinitialize mutable objects every
+    time a new class instance is created. If a member has a mutable type and it is created without specifying the
+    `field(default_factory=...)`, then Python throws an error requiring the usage of `default_factory`.
+
+    Additionally, Python only explicitly checks for field specification when the type is a list, set or dict. This
+    misses the use-case where the type is a class itself. Thus, the code silently carries a bug with it which can
+    lead to undesirable effects.
+
+    This function deals with this issue.
+
+    This makes the following a feasible operation:
+
+    @dataclass
+    class State:
+        pos: list = [0.0, 0.0, 0.0]
+           ^^
+           If the function is NOT used, the following value-error is returned:
+           ValueError: mutable default <class 'list'> for field pos is not allowed: use default_factory
+    """
+    # note: Need to set this up in the same order as annotations. Otherwise, it
+    #   complains about missing positional arguments.
+    ann = cls.__dict__.get('__annotations__', {})
+
+    # iterate over all class members and store them in a dictionary
+    class_members = {}
+    for base in reversed(cls.__mro__):
+        # check if base is object
+        if base is object:
+            continue
+        # iterate over base class members
+        for key in base.__dict__:
+            # get class member
+            f = getattr(base, key)
+            # skip members
+            if _skippable_class_member(key, f):
+                continue
+            # store class member if it is not a type or if it is already present in annotations
+            if not isinstance(f, type) or key in ann:
+                class_members[key] = f
+        # iterate over base class data fields
+        # in previous call, things that became a dataclass field were removed from class members
+        # so we need to add them back here as a dataclass field directly
+        for key, f in base.__dict__.get('__dataclass_fields__', {}).items():
+            # store class member
+            if not isinstance(f, type):
+                class_members[key] = f
+
+    # check that all annotations are present in class members
+    # note: mainly for debugging purposes
+    if len(class_members) != len(ann):
+        raise ValueError(
+            f"In class '{cls.__name__}', number of annotations ({len(ann)}) does not match number of class members"
+            f' ({len(class_members)}). Please check that all class members have type annotations and/or a default'
+            " value. If you don't want to specify a default value, please use the literal `dataclasses.MISSING`.")
+    # iterate over annotations and add field factory for mutable types
+    for key in ann:
+        # find matching field in class
+        value = class_members.get(key, MISSING)
+        # check if key belongs to ClassVar
+        # in that case, we cannot use default_factory!
+        origin = getattr(ann[key], '__origin__', None)
+        if origin is ClassVar:
+            continue
+        # check if f is MISSING
+        # note: commented out for now since it causes issue with inheritance
+        #   of dataclasses when parent have some positional and some keyword arguments.
+        #   Ref: https://stackoverflow.com/questions/51575931/class-inheritance-in-python-3-7-dataclasses
+        #   TODO: check if this is fixed in Python 3.10
+        # if f is MISSING:
+        #     continue
+        if isinstance(value, Field):
+            setattr(cls, key, value)
+        elif not isinstance(value, type):
+            # create field factory for mutable types
+            value = field(default_factory=_return_f(value))
+            setattr(cls, key, value)
+
+
+def _custom_post_init(obj):
+    """Deepcopy all elements to avoid shared memory issues for mutable objects in dataclasses initialization.
+
+    This function is called explicitly instead of as a part of :func:`_process_mutable_types()` to prevent mapping
+    proxy type i.e. a read only proxy for mapping objects. The error is thrown when using hierarchical data-classes
+    for configuration.
+    """
+    for key in dir(obj):
+        # skip dunder members
+        if key.startswith('__'):
+            continue
+        # get data member
+        value = getattr(obj, key)
+        # duplicate data members
+        if not callable(value):
+            setattr(obj, key, deepcopy(value))
+
+
+def _combined_function(f1: Callable, f2: Callable) -> Callable:
+    """Combine two functions into one.
+
+    Args:
+        f1: The first function.
+        f2: The second function.
+
+    Returns:
+        The combined function.
+    """
+
+    def _combined(*args, **kwargs):
+        # call both functions
+        f1(*args, **kwargs)
+        f2(*args, **kwargs)
+
+    return _combined
+
+
+"""
+Helper functions
+"""
+
+
+def _skippable_class_member(key: str, value: Any, hints: dict | None = None) -> bool:
+    """Check if the class member should be skipped in configclass processing.
+
+    The following members are skipped:
+
+    * Dunder members: ``__name__``, ``__module__``, ``__qualname__``, ``__annotations__``, ``__dict__``.
+    * Manually-added special class functions: From :obj:`_CONFIGCLASS_METHODS`.
+    * Members that are already present in the type annotations.
+    * Functions bound to the class object or class.
+
+    Args:
+        key: The class member name.
+        value: The class member value.
+        hints: The type hints for the class. Defaults to None, in which case, the
+            member's existence in the type hints is not checked.
+
+    Returns:
+        True if the class member should be skipped, False otherwise.
+    """
+    # skip dunder members
+    if key.startswith('__'):
+        return True
+    # skip manually-added special class functions
+    if key in _CONFIGCLASS_METHODS:
+        return True
+    # check if key is already present
+    if hints is not None and key in hints:
+        return True
+    # skip functions bound to class
+    if callable(value):
+        signature = inspect.signature(value)
+        if 'self' in signature.parameters or 'cls' in signature.parameters:
+            return True
+    # Otherwise, don't skip
+    return False
+
+
+def _return_f(f: Any) -> Callable[[], Any]:
+    """Returns default factory function for creating mutable/immutable variables.
+
+    This function should be used to create default factory functions for variables.
+
+    Example:
+
+    .. code-block:: python
+
+        value = field(default_factory=_return_f(value))
+        setattr(cls, key, value)
+    """
+
+    def _wrap():
+        if isinstance(f, Field):
+            if f.default_factory is MISSING:
+                return deepcopy(f.default)
+            else:
+                return f.default_factory
+        else:
+            return f
+
+    return _wrap
+
+# Copyright (c) 2022-2024, The ORBIT Project Developers.
+# All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+# yapf: disable
+
+"""Sub-module for utilities for working with dictionaries."""
+
+import collections.abc
+import hashlib
+import json
+from collections.abc import Iterable, Mapping
+from typing import Any
+
+from .array import TENSOR_TYPE_CONVERSIONS, TENSOR_TYPES
+from .string import callable_to_string, string_to_callable
+
+"""
+Dictionary <-> Class operations.
+"""
+
+
+def class_to_dict(obj: object) -> dict[str, Any]:
+    """Convert an object into dictionary recursively.
+
+    Note:
+        Ignores all names starting with "__" (i.e. built-in methods).
+
+    Args:
+        obj: An instance of a class to convert.
+
+    Raises:
+        ValueError: When input argument is not an object.
+
+    Returns:
+        Converted dictionary mapping.
+    """
+    # check that input data is class instance
+    if not hasattr(obj, '__class__'):
+        raise ValueError(f'Expected a class instance. Received: {type(obj)}.')
+    # convert object to dictionary
+    if isinstance(obj, dict):
+        obj_dict = obj
+    else:
+        obj_dict = obj.__dict__
+    # convert to dictionary
+    data = dict()
+    for key, value in obj_dict.items():
+        # disregard builtin attributes
+        if key.startswith('__'):
+            continue
+        # check if attribute is callable -- function
+        if callable(value):
+            data[key] = callable_to_string(value)
+        # check if attribute is a dictionary
+        elif hasattr(value, '__dict__') or isinstance(value, dict):
+            data[key] = class_to_dict(value)
+        else:
+            data[key] = value
+    return data
+
+
+def update_class_from_dict(obj, data: dict[str, Any], _ns: str = '') -> None:
+    """Reads a dictionary and sets object variables recursively.
+
+    This function performs in-place update of the class member attributes.
+
+    Args:
+        obj: An instance of a class to update.
+        data: Input dictionary to update from.
+        _ns: Namespace of the current object. This is useful for nested configuration
+            classes or dictionaries. Defaults to "".
+
+    Raises:
+        TypeError: When input is not a dictionary.
+        ValueError: When dictionary has a value that does not match default config type.
+        KeyError: When dictionary has a key that does not exist in the default config type.
+    """
+    for key, value in data.items():
+        # key_ns is the full namespace of the key
+        key_ns = _ns + '/' + key
+        # check if key is present in the object
+        if hasattr(obj, key):
+            obj_mem = getattr(obj, key)
+            if isinstance(obj_mem, Mapping):
+                # Note: We don't handle two-level nested dictionaries. Just use configclass if this is needed.
+                # iterate over the dictionary to look for callable values
+                for k, v in obj_mem.items():
+                    if callable(v):
+                        value[k] = string_to_callable(value[k])
+                setattr(obj, key, value)
+            elif isinstance(value, Mapping):
+                # recursively call if it is a dictionary
+                update_class_from_dict(obj_mem, value, _ns=key_ns)
+            elif isinstance(value, Iterable) and not isinstance(value, str):
+                # check length of value to be safe
+                if len(obj_mem) != len(value) and obj_mem is not None:
+                    raise ValueError(
+                        f'[Config]: Incorrect length under namespace: {key_ns}.'
+                        f' Expected: {len(obj_mem)}, Received: {len(value)}.'
+                    )
+                # set value
+                setattr(obj, key, value)
+            elif callable(obj_mem):
+                # update function name
+                value = string_to_callable(value)
+                setattr(obj, key, value)
+            elif isinstance(value, type(obj_mem)):
+                # check that they are type-safe
+                setattr(obj, key, value)
+            else:
+                raise ValueError(
+                    f'[Config]: Incorrect type under namespace: {key_ns}.'
+                    f' Expected: {type(obj_mem)}, Received: {type(value)}.'
+                )
+        else:
+            raise KeyError(f'[Config]: Key not found under namespace: {key_ns}.')
+
+
+def dict_to_md5_hash(data: object) -> str:
+    """Convert a dictionary into a hashable key using MD5 hash.
+
+    Args:
+        data: Input dictionary or configuration object to convert.
+
+    Returns:
+        A string object of double length containing only hexadecimal digits.
+    """
+    # convert to dictionary
+    if isinstance(data, dict):
+        encoded_buffer = json.dumps(data, sort_keys=True).encode()
+    else:
+        encoded_buffer = json.dumps(class_to_dict(data), sort_keys=True).encode()
+    # compute hash using MD5
+    data_hash = hashlib.md5()
+    data_hash.update(encoded_buffer)
+    # return the hash key
+    return data_hash.hexdigest()
+
+
+"""
+Dictionary operations.
+"""
+
+
+
+def convert_dict_to_backend(
+    data: dict, backend: str = 'numpy', array_types: Iterable[str] = ('numpy', 'torch', 'warp')
+) -> dict:
+    """Convert all arrays or tensors in a dictionary to a given backend.
+
+    This function iterates over the dictionary, converts all arrays or tensors with the given types to
+    the desired backend, and stores them in a new dictionary. It also works with nested dictionaries.
+
+    Currently supported backends are "numpy", "torch", and "warp".
+
+    Note:
+        This function only converts arrays or tensors. Other types of data are left unchanged. Mutable types
+        (e.g. lists) are referenced by the new dictionary, so they are not copied.
+
+    Args:
+        data: An input dict containing array or tensor data as values.
+        backend: The backend ("numpy", "torch", "warp") to which arrays in this dict should be converted.
+            Defaults to "numpy".
+        array_types: A list containing the types of arrays that should be converted to
+            the desired backend. Defaults to ("numpy", "torch", "warp").
+
+    Raises:
+        ValueError: If the specified ``backend`` or ``array_types`` are unknown, i.e. not in the list of supported
+            backends ("numpy", "torch", "warp").
+
+    Returns:
+        The updated dict with the data converted to the desired backend.
+    """
+    # THINK: Should we also support converting to a specific device, e.g. "cuda:0"?
+    # Check the backend is valid.
+    if backend not in TENSOR_TYPE_CONVERSIONS:
+        raise ValueError(f"Unknown backend '{backend}'. Supported backends are 'numpy', 'torch', and 'warp'.")
+    # Define the conversion functions for each backend.
+    tensor_type_conversions = TENSOR_TYPE_CONVERSIONS[backend]
+
+    # Parse the array types and convert them to the corresponding types: "numpy" -> np.ndarray, etc.
+    parsed_types = list()
+    for t in array_types:
+        # Check type is valid.
+        if t not in TENSOR_TYPES:
+            raise ValueError(f"Unknown array type: '{t}'. Supported array types are 'numpy', 'torch', and 'warp'.")
+        # Exclude types that match the backend, since we do not need to convert these.
+        if t == backend:
+            continue
+        # Convert the string types to the corresponding types.
+        parsed_types.append(TENSOR_TYPES[t])
+
+    # Convert the data to the desired backend.
+    output_dict = dict()
+    for key, value in data.items():
+        # Obtain the data type of the current value.
+        data_type = type(value)
+        # -- arrays
+        if data_type in parsed_types:
+            # check if we have a known conversion.
+            if data_type not in tensor_type_conversions:
+                raise ValueError(f'No registered conversion for data type: {data_type} to {backend}!')
+            # convert the data to the desired backend.
+            output_dict[key] = tensor_type_conversions[data_type](value)
+        # -- nested dictionaries
+        elif isinstance(data[key], dict):
+            # recurse with the same backend and array types so nested dicts are converted consistently
+            output_dict[key] = convert_dict_to_backend(value, backend, array_types)
+        # -- everything else
+        else:
+            output_dict[key] = value
+
+    return output_dict
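+
+# A minimal usage sketch (illustrative):
+#
+#     batch = {'obs': np.zeros(3), 'info': {'reward': np.ones(1)}}
+#     torch_batch = convert_dict_to_backend(batch, backend='torch')
+#     # torch_batch['obs'] and torch_batch['info']['reward'] are now torch.Tensors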
+
+
+
+def update_dict(orig_dict: dict, new_dict: collections.abc.Mapping) -> dict:
+    """Updates existing dictionary with values from a new dictionary.
+
+    This function mimics the dict.update() function. However, it works for
+    nested dictionaries as well.
+
+    Reference:
+        https://stackoverflow.com/questions/3232943/update-value-of-a-nested-dictionary-of-varying-depth
+
+    Args:
+        orig_dict: The original dictionary to insert items to.
+        new_dict: The new dictionary to insert items from.
+
+    Returns:
+        The updated dictionary.
+    """
+    for keyname, value in new_dict.items():
+        if isinstance(value, collections.abc.Mapping):
+            orig_dict[keyname] = update_dict(orig_dict.get(keyname, {}), value)
+        else:
+            orig_dict[keyname] = value
+    return orig_dict
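+
+# A minimal usage sketch (illustrative): nested keys are merged rather than
+# replaced wholesale, unlike plain dict.update().
+#
+#     base = {'sim': {'dt': 0.01, 'substeps': 1}}
+#     update_dict(base, {'sim': {'substeps': 4}})
+#     # -> {'sim': {'dt': 0.01, 'substeps': 4}}
+
+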
+def compute_path_bbox(prim_path: str) -> typing.Tuple[carb.Double3, carb.Double3]:
+    """
+    Compute Bounding Box using omni.usd.UsdContext.compute_path_world_bounding_box
+    See https://docs.omniverse.nvidia.com/kit/docs/omni.usd/latest/omni.usd/omni.usd.UsdContext.html#\
+    omni.usd.UsdContext.compute_path_world_bounding_box
+
+    Args:
+        prim_path: A prim path to compute the bounding box.
+
+    Returns:
+        A range (i.e. bounding box) as a minimum point and maximum point.
+    """
+    return omni.usd.get_context().compute_path_world_bounding_box(prim_path)
+
+
+def get_pick_position(robot_base_position: np.ndarray, prim_path: str) -> np.ndarray:
+    """Get the pick position for a manipulator robot to pick an object at prim_path.
+    The pick position is simply the nearest top vertex of the object's bounding box.
+
+    Args:
+        robot_base_position (np.ndarray): robot base position.
+        prim_path (str): prim path of the object to pick.
+
+    Returns:
+        np.ndarray: pick position.
+    """
+    bbox_0, bbox_1 = compute_path_bbox(prim_path)
+
+    x1 = bbox_0[0]
+    x2 = bbox_1[0]
+    y1 = bbox_0[1]
+    y2 = bbox_1[1]
+    top_z = bbox_0[2] if bbox_0[2] > bbox_1[2] else bbox_1[2]
+
+    top_vertices = [
+        np.array([x1, y1, top_z]),
+        np.array([x1, y2, top_z]),
+        np.array([x2, y1, top_z]),
+        np.array([x2, y2, top_z]),
+    ]
+
+    print('================================ Top vertices: ', top_vertices, ' ====================================')
+
+    pick_position = top_vertices[0]
+    for vertex in top_vertices:
+        if np.linalg.norm(robot_base_position - vertex) < np.linalg.norm(robot_base_position - pick_position):
+            pick_position = vertex
+
+    return pick_position
+
+
+def get_grabbed_able_xform_paths(root_path: str, prim: Usd.Prim, depth: int = 3) -> typing.List[str]:
+    """Get all prim paths of Xform objects under a specified prim.
+
+    Args:
+        root_path (str): root path of scenes.
+        prim (Usd.Prim): target prim.
+        depth (int, optional): expected depth of Xform objects relative to root_path. Defaults to 3.
+
+    Returns:
+        typing.List[str]: prim paths.
+    """
+    paths = []
+    if prim is None:
+        return paths
+    print(f'get_grabbed_able_xform_paths: start to traverse {prim.GetPrimPath()}')
+    relative_prim_path = str(prim.GetPrimPath())[len(root_path):]
+    if relative_prim_path.count('/') <= depth:
+        for child in prim.GetChildren():
+            if child.GetTypeName() == 'Scope':
+                paths.extend(get_grabbed_able_xform_paths(root_path, child))
+            if child.GetTypeName() == 'Xform':
+                paths.append(str(child.GetPrimPath()))
+
+    return paths
+
+
+def get_world_transform_xform(prim: Usd.Prim) -> typing.Tuple[Gf.Vec3d, Gf.Rotation, Gf.Vec3d]:
+    """
+    Get the world transformation of a prim using omni.usd.get_world_transform_matrix().
+    See https://docs.omniverse.nvidia.com/kit/docs/omni.usd/latest/omni.usd/omni.usd.get_world_transform_matrix.html
+
+    Args:
+        prim: The prim to calculate the world transformation.
+
+    Returns:
+        A tuple of:
+        - Translation vector.
+        - Rotation quaternion, i.e. 3d vector plus angle.
+        - Scale vector.
+    """
+    world_transform: Gf.Matrix4d = omni.usd.get_world_transform_matrix(prim)
+    translation: Gf.Vec3d = world_transform.ExtractTranslation()
+    rotation: Gf.Rotation = world_transform.ExtractRotation()
+    scale: Gf.Vec3d = Gf.Vec3d(*(v.GetLength() for v in world_transform.ExtractRotationMatrix()))
+    return translation, rotation, scale
+
+
+def nearest_xform_from_position(stage: Usd.Stage,
+                                xform_paths: typing.List[str],
+                                position: np.ndarray,
+                                threshold: float = 0) -> str:
+    """Get the prim path of the nearest Xform object from the target position.
+
+    Args:
+        stage (Usd.Stage): usd stage.
+        xform_paths (typing.List[str]): full list of xform paths.
+        position (np.ndarray): target position.
+        threshold (float, optional): max distance. Defaults to 0 (unlimited).
+
+    Returns:
+        str: prim path of the Xform object, None if not found.
+    """
+    start = time.time()
+    if threshold == 0:
+        threshold = 1000000.0
+    min_dist = threshold
+    nearest_prim_path = None
+    for path in xform_paths:
+        prim = stage.GetPrimAtPath(path)
+        if prim is not None and prim.IsValid():
+            pose = get_world_transform_xform(prim)
+            dist = np.linalg.norm(pose[0] - position)
+            if dist < min_dist:
+                min_dist = dist
+                nearest_prim_path = path
+
+    print(f'nearest_xform_from_position costs: {time.time() - start}')
+    return nearest_prim_path
+"""
+A set of utility functions for general python usage
+"""
+importinspect
+importre
+fromabcimportABCMeta
+fromcollections.abcimportIterable
+fromcopyimportdeepcopy
+fromfunctoolsimportwraps
+fromimportlibimportimport_module
+
+importnumpyasnp
+
+# Global dictionary storing all unique names
+NAMES=set()
+CLASS_NAMES=set()
+
+
+classClassProperty:
+
+ def__init__(self,f_get):
+ self.f_get=f_get
+
+ def__get__(self,owner_self,owner_cls):
+ returnself.f_get(owner_cls)
+
+
+
[docs]defsubclass_factory(name,base_classes,__init__=None,**kwargs):
+"""
+ Programmatically generates a new class type with name @name, subclassing from base classes @base_classes, with
+ corresponding __init__ call @__init__.
+
+ NOTE: If __init__ is None (default), the __init__ call from @base_classes will be used instead.
+
+ cf. https://stackoverflow.com/questions/15247075/how-can-i-dynamically-create-derived-classes-from-a-base-class
+
+ Args:
+ name (str): Generated class name
+ base_classes (type, or list of type): Base class(es) to use for generating the subclass
+ __init__ (None or function): Init call to use for the base class when it is instantiated. If None if specified,
+ the newly generated class will automatically inherit the __init__ call from @base_classes
+ **kwargs (any): keyword-mapped parameters to override / set in the child class, where the keys represent
+ the class / instance attribute to modify and the values represent the functions / value to set
+ """
+ # Standardize base_classes
+ base_classes=tuple(base_classesifisinstance(base_classes,Iterable)else[base_classes])
+
+ # Generate the new class
+ if__init__isnotNone:
+ kwargs['__init__']=__init__
+ returntype(name,base_classes,kwargs)
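+
+# A minimal usage sketch (illustrative; Base and the greet attribute are
+# assumptions):
+#
+#     class Base:
+#         greet = 'hello'
+#
+#     Child = subclass_factory('Child', Base, greet='hi')
+#     assert Child().greet == 'hi' and issubclass(Child, Base)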
+
+
+
+def save_init_info(func):
+    """
+    Decorator to save the init info of an object to object._init_info.
+
+    _init_info contains class name and class constructor's input args.
+    """
+    sig = inspect.signature(func)
+
+    @wraps(func)  # preserve func name, docstring, arguments list, etc.
+    def wrapper(self, *args, **kwargs):
+        values = sig.bind(self, *args, **kwargs)
+
+        # Prevent args of super init from being saved.
+        if hasattr(self, '_init_info'):
+            func(*values.args, **values.kwargs)
+            return
+
+        # Initialize class's self._init_info.
+        self._init_info = {'class_module': self.__class__.__module__, 'class_name': self.__class__.__name__, 'args': {}}
+
+        # Populate class's self._init_info.
+        for k, p in sig.parameters.items():
+            if k == 'self':
+                continue
+            if k in values.arguments:
+                val = values.arguments[k]
+                if p.kind in (inspect.Parameter.POSITIONAL_OR_KEYWORD, inspect.Parameter.KEYWORD_ONLY):
+                    self._init_info['args'][k] = val
+                elif p.kind == inspect.Parameter.VAR_KEYWORD:
+                    for kwarg_k, kwarg_val in values.arguments[k].items():
+                        self._init_info['args'][kwarg_k] = kwarg_val
+
+        # Call the original function.
+        func(*values.args, **values.kwargs)
+
+    return wrapper
+
+
+class RecreatableMeta(type):
+    """
+    Simple metaclass that automatically saves __init__ args of the instances it creates.
+    """
+
+    def __new__(cls, clsname, bases, clsdict):
+        if '__init__' in clsdict:
+            clsdict['__init__'] = save_init_info(clsdict['__init__'])
+        return super().__new__(cls, clsname, bases, clsdict)
+
+
+class RecreatableAbcMeta(RecreatableMeta, ABCMeta):
+    """
+    A composite metaclass of both RecreatableMeta and ABCMeta.
+
+    Adding in ABCMeta to resolve metaclass conflicts.
+    """
+
+    pass
+
+
+class Recreatable(metaclass=RecreatableAbcMeta):
+    """
+    Simple class that provides an abstract interface automatically saving __init__ args of
+    the classes inheriting it.
+    """
+
+    def get_init_info(self):
+        """
+        Grabs relevant initialization information for this class instance. Useful for directly
+        reloading an object from this information, using @create_object_from_init_info.
+
+        Returns:
+            dict: Nested dictionary that contains this object's initialization information
+        """
+        # Note: self._init_info is procedurally generated via @save_init_info called in metaclass
+        return self._init_info
+
+
+def create_object_from_init_info(init_info):
+    """
+    Create a new object based on given init info.
+
+    Args:
+        init_info (dict): Nested dictionary that contains an object's init information.
+
+    Returns:
+        any: Newly created object.
+    """
+    module = import_module(init_info['class_module'])
+    cls = getattr(module, init_info['class_name'])
+    return cls(**init_info['args'], **init_info.get('kwargs', {}))
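+
+# A minimal round-trip sketch (illustrative; MyTask and its args are
+# assumptions, and the class must live in an importable module): a Recreatable
+# subclass records its constructor args, so an equivalent instance can be
+# rebuilt from that record alone.
+#
+#     class MyTask(Recreatable):
+#         def __init__(self, name='demo', scale=1.0):
+#             self.name, self.scale = name, scale
+#
+#     t = MyTask(scale=2.0)
+#     clone = create_object_from_init_info(t.get_init_info())  # clone.scale == 2.0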
+
+
+
+def merge_nested_dicts(base_dict, extra_dict, inplace=False, verbose=False):
+    """
+    Iteratively updates @base_dict with values from @extra_dict. Note: This generates a new dictionary!
+
+    Args:
+        base_dict (dict): Nested base dictionary, which should be updated with all values from @extra_dict
+        extra_dict (dict): Nested extra dictionary, whose values will overwrite corresponding ones in @base_dict
+        inplace (bool): Whether to modify @base_dict in place or not
+        verbose (bool): If True, will print when keys are mismatched
+
+    Returns:
+        dict: Updated dictionary
+    """
+    # Loop through all keys in @extra_dict and update the corresponding values in @base_dict
+    base_dict = base_dict if inplace else deepcopy(base_dict)
+    for k, v in extra_dict.items():
+        if k not in base_dict:
+            base_dict[k] = v
+        else:
+            if isinstance(v, dict) and isinstance(base_dict[k], dict):
+                base_dict[k] = merge_nested_dicts(base_dict[k], v)
+            else:
+                not_equal = base_dict[k] != v
+                if isinstance(not_equal, np.ndarray):
+                    not_equal = not_equal.any()
+                if not_equal and verbose:
+                    print(f'Different values for key {k}: {base_dict[k]}, {v}\n')
+                base_dict[k] = np.array(v) if isinstance(v, list) else v
+
+    # Return new dict
+    return base_dict
+
+
+def get_class_init_kwargs(cls):
+    """
+    Helper function to return a list of all valid keyword arguments (excluding "self") for the given @cls class.
+
+    Args:
+        cls (object): Class from which to grab __init__ kwargs
+
+    Returns:
+        list: All keyword arguments (excluding "self") specified by @cls __init__ constructor method
+    """
+    return list(inspect.signature(cls.__init__).parameters.keys())[1:]
+
+
+def extract_subset_dict(dic, keys, copy=False):
+    """
+    Helper function to extract a subset of dictionary key-values from a current dictionary. Optionally (deep)copies
+    the values extracted from the original @dic if @copy is True.
+
+    Args:
+        dic (dict): Dictionary containing multiple key-values
+        keys (Iterable): Specific keys to extract from @dic. If the key doesn't exist in @dic, then the key is skipped
+        copy (bool): If True, will deepcopy all values corresponding to the specified @keys
+
+    Returns:
+        dict: Extracted subset dictionary containing only the specified @keys and their corresponding values
+    """
+    subset = {k: dic[k] for k in keys if k in dic}
+    return deepcopy(subset) if copy else subset
+
+
+def extract_class_init_kwargs_from_dict(cls, dic, copy=False):
+    """
+    Helper function to return a dictionary of key-values that specifically correspond to @cls class's __init__
+    constructor method, from @dic which may or may not contain additional, irrelevant kwargs.
+    Note that @dic may possibly be missing certain kwargs as specified by cls.__init__. No error will be raised.
+
+    Args:
+        cls (object): Class from which to grab __init__ kwargs that will be used as filtering keys for @dic
+        dic (dict): Dictionary containing multiple key-values
+        copy (bool): If True, will deepcopy all values corresponding to the specified @keys
+
+    Returns:
+        dict: Extracted subset dictionary possibly containing only the specified keys from cls.__init__ and their
+            corresponding values
+    """
+    # extract only relevant kwargs for this specific backbone
+    return extract_subset_dict(
+        dic=dic,
+        keys=get_class_init_kwargs(cls),
+        copy=copy,
+    )
+
+
+def assert_valid_key(key, valid_keys, name=None):
+    """
+    Helper function that asserts that @key is in dictionary @valid_keys keys. If not, it will raise an error.
+
+    Args:
+        key (any): key to check for in dictionary @dic's keys
+        valid_keys (Iterable): contains keys that should be checked with @key
+        name (str or None): if specified, is the name associated with the key that will be printed out if the
+            key is not found. If None, default is "value"
+    """
+    if name is None:
+        name = 'value'
+    assert key in valid_keys, 'Invalid {} received! Valid options are: {}, got: {}'.format(
+        name,
+        valid_keys.keys() if isinstance(valid_keys, dict) else valid_keys, key)
+
+
+def create_class_from_registry_and_config(cls_name, cls_registry, cfg, cls_type_descriptor):
+    """
+    Helper function to create a class with str type @cls_name, which should be a valid entry in @cls_registry, using
+    kwargs in dictionary form @cfg to pass to the constructor, with @cls_type_descriptor specified for debugging
+
+    Args:
+        cls_name (str): Name of the class to create. This should correspond to the actual class type, in string form
+        cls_registry (dict): Class registry. This should map string names of valid classes to create to the
+            actual class type itself
+        cfg (dict): Any keyword arguments to pass to the class constructor
+        cls_type_descriptor (str): Description of the class type being created. This can be any string and is used
+            solely for debugging purposes
+
+    Returns:
+        any: Created class instance
+    """
+    # Make sure the requested class type is valid
+    assert_valid_key(key=cls_name, valid_keys=cls_registry, name=f'{cls_type_descriptor} type')
+
+    # Grab the kwargs relevant for the specific class
+    cls = cls_registry[cls_name]
+    cls_kwargs = extract_class_init_kwargs_from_dict(cls=cls, dic=cfg, copy=False)
+
+    # Create the class
+    return cls(**cls_kwargs)
+
+
+def get_uuid(name, n_digits=8):
+    """
+    Helper function to create a unique @n_digits uuid given a unique @name
+
+    Args:
+        name (str): Name of the object or class
+        n_digits (int): Number of digits of the uuid, default is 8
+
+    Returns:
+        int: uuid
+    """
+    return abs(hash(name)) % (10 ** n_digits)
+
+
+def camel_case_to_snake_case(camel_case_text):
+    """
+    Helper function to convert a camel case text to snake case, e.g. "StrawberrySmoothie" -> "strawberry_smoothie"
+
+    Args:
+        camel_case_text (str): Text in camel case
+
+    Returns:
+        str: snake case text
+    """
+    return re.sub(r'(?<!^)(?=[A-Z])', '_', camel_case_text).lower()
+
+
+
+def snake_case_to_camel_case(snake_case_text):
+    """
+    Helper function to convert a snake case text to camel case, e.g. "strawberry_smoothie" -> "StrawberrySmoothie"
+
+    Args:
+        snake_case_text (str): Text in snake case
+
+    Returns:
+        str: camel case text
+    """
+    return ''.join(item.title() for item in snake_case_text.split('_'))
+
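+# Illustrative sketch (not part of the original module): round-tripping the two case
+# converters, plus a caveat that get_uuid() relies on Python's built-in hash(), which is
+# randomized per process for strings unless PYTHONHASHSEED is fixed.
+#
+#     assert camel_case_to_snake_case('StrawberrySmoothie') == 'strawberry_smoothie'
+#     assert snake_case_to_camel_case('strawberry_smoothie') == 'StrawberrySmoothie'
+#     uid = get_uuid('robot_0')  # stable within a process, 8 digits by default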
+
+
+def meets_minimum_version(test_version, minimum_version):
+    """
+    Verify that @test_version meets the @minimum_version
+
+    Args:
+        test_version (str): Python package version. Should be, e.g., 0.26.1
+        minimum_version (str): Python package version to test against. Should be, e.g., 0.27.2
+
+    Returns:
+        bool: Whether @test_version meets @minimum_version
+    """
+    test_nums = [int(num) for num in test_version.split('.')]
+    minimum_nums = [int(num) for num in minimum_version.split('.')]
+    assert len(test_nums) == 3
+    assert len(minimum_nums) == 3
+
+    for test_num, minimum_num in zip(test_nums, minimum_nums):
+        if test_num > minimum_num:
+            return True
+        elif test_num < minimum_num:
+            return False
+        # Otherwise, we continue through all sub-versions
+
+    # If we get here, that means test_version == minimum_version, so this is a success
+    return True
+
+
+
+class UniquelyNamed:
+    """
+    Simple class that implements a name property, which must be implemented by a subclass. Note that any named
+    entity must be UNIQUE!
+    """
+
+    def __init__(self):
+        global NAMES
+        # Register this object, making sure its name is unique
+        assert self.name not in NAMES, \
+            f'UniquelyNamed object with name {self.name} already exists!'
+        NAMES.add(self.name)
+
+    # def __del__(self):
+    #     # Remove this object's name from the registry if it's still there
+    #     self.remove_names(include_all_owned=True)
+
+
+    def remove_names(self, include_all_owned=True, skip_ids=None):
+        """
+        Checks if self.name exists in the global NAMES registry, and deletes it if so. Possibly also iterates through
+        all owned member variables and checks for their corresponding names if @include_all_owned is True.
+
+        Args:
+            include_all_owned (bool): If True, will iterate through all owned members of this instance and remove
+                their names as well, if they are UniquelyNamed
+            skip_ids (None or set of int): If specified, will skip over any ids in the specified set that are matched
+                to any attributes found (this compares id(attr) to @skip_ids).
+        """
+        # Make sure skip_ids is a set so we can pass it into the recursive method, and add this instance's id so we
+        # don't get infinite recursive loops
+        skip_ids = set() if skip_ids is None else skip_ids
+        skip_ids.add(id(self))
+
+        # Check for this name, possibly remove it if it exists
+        if self.name in NAMES:
+            NAMES.remove(self.name)
+
+        # Also possibly iterate through all owned members and check if those are instances of UniquelyNamed
+        if include_all_owned:
+            self._remove_names_recursively_from_dict(dic=self.__dict__, skip_ids=skip_ids)
+
+    def _remove_names_recursively_from_dict(self, dic, skip_ids=None):
+        """
+        Recursively iterates through @dic and removes the names of any UniquelyNamed values from the global NAMES
+        registry.
+
+        Args:
+            dic (dict): Dictionary whose values should be checked
+            skip_ids (None or set): If specified, will skip over any objects in the specified set that are matched
+                to any attributes found.
+        """
+        # Make sure skip_ids is a set so we can pass it into the recursive calls, and add this dictionary's id so we
+        # don't get infinite recursive loops
+        skip_ids = set() if skip_ids is None else skip_ids
+        skip_ids.add(id(dic))
+
+        # Loop through all values in the inputted dictionary, and check if any of the values are UniquelyNamed
+        for name, val in dic.items():
+            if id(val) not in skip_ids:
+                # No need to explicitly add val to skip_ids because the methods below handle adding it
+                if isinstance(val, UniquelyNamed):
+                    val.remove_names(include_all_owned=True, skip_ids=skip_ids)
+                elif isinstance(val, dict):
+                    # Recursively iterate
+                    self._remove_names_recursively_from_dict(dic=val, skip_ids=skip_ids)
+                elif hasattr(val, '__dict__'):
+                    # Add the attribute and recursively iterate
+                    skip_ids.add(id(val))
+                    self._remove_names_recursively_from_dict(dic=val.__dict__, skip_ids=skip_ids)
+                else:
+                    # Otherwise we just add the value to skip_ids so we don't check it again
+                    skip_ids.add(id(val))
+
+    @property
+    def name(self):
+        """
+        Returns:
+            str: Name of this instance. Must be unique!
+        """
+        raise NotImplementedError
+
+
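+# Illustrative sketch (not part of the original module): a minimal UniquelyNamed subclass.
+# The `name` property must resolve before __init__ runs, since registration happens there.
+#
+#     class NamedThing(UniquelyNamed):
+#         def __init__(self, name):
+#             self._name = name
+#             super().__init__()  # registers self._name in NAMES
+#
+#         @property
+#         def name(self):
+#             return self._name
+#
+#     thing = NamedThing('thing_0')
+#     thing.remove_names()  # frees 'thing_0' for reuse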
+
+class UniquelyNamedNonInstance:
+    """
+    Identical to UniquelyNamed, but intended for non-instanceable classes
+    """
+
+    def __init_subclass__(cls, **kwargs):
+        global CLASS_NAMES
+        # Register this class, making sure its name is unique
+        assert cls.name not in CLASS_NAMES, \
+            f'UniquelyNamed class with name {cls.name} already exists!'
+        CLASS_NAMES.add(cls.name)
+
+    @ClassProperty
+    def name(self):
+        """
+        Returns:
+            str: Name of this class. Must be unique!
+        """
+        raise NotImplementedError
+
+
+
+class Registerable:
+    """
+    Simple class template that provides an abstract interface for registering classes.
+    """
+
+    def __init_subclass__(cls, **kwargs):
+        """
+        Registers all subclasses as part of this registry. This is useful to decouple the internal codebase from
+        external user additions. This way, users can add their custom subclasses by simply extending this class,
+        and they will automatically be registered internally. This allows users to then specify their classes
+        directly in string form in, e.g., their config files, without having to manually set the str-to-class mapping
+        in our code.
+        """
+        cls._register_cls()
+
+    @classmethod
+    def _register_cls(cls):
+        """
+        Register this class. Can be extended by subclass.
+        """
+        if cls.__name__ not in cls._cls_registry and cls.__name__ not in cls._do_not_register_classes:
+            cls._cls_registry[cls.__name__] = cls
+
+    @ClassProperty
+    def _do_not_register_classes(self):
+        """
+        Returns:
+            set of str: Name(s) of classes that should not be registered. Default is empty set.
+                Subclasses that shouldn't be added should call super() and then add their own class name to the set
+        """
+        return set()
+
+    @ClassProperty
+    def _cls_registry(self):
+        """
+        Returns:
+            dict: Mapping from all registered class names to their classes. This should be a REFERENCE
+                to some external, global dictionary that will be filled in at runtime.
+        """
+        raise NotImplementedError()
+
+
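+# Illustrative sketch (not part of the original module): a registry base built on
+# Registerable. `BaseTask` and `MY_TASK_REGISTRY` are hypothetical names for this example.
+#
+#     MY_TASK_REGISTRY = {}
+#
+#     class BaseTask(Registerable):
+#         @ClassProperty
+#         def _cls_registry(self):
+#             return MY_TASK_REGISTRY  # reference to the shared, global registry
+#
+#         @ClassProperty
+#         def _do_not_register_classes(self):
+#             return {'BaseTask'}  # skip registering the abstract base itself
+#
+#     class PickAndPlaceTask(BaseTask):  # auto-registered via __init_subclass__
+#         pass
+#
+#     assert MY_TASK_REGISTRY['PickAndPlaceTask'] is PickAndPlaceTask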
+
+class Serializable:
+    """
+    Simple class that provides an abstract interface to dump / load states, optionally with serialized functionality
+    as well.
+    """
+
+    @property
+    def state_size(self):
+        """
+        Returns:
+            int: Size of this object's serialized state
+        """
+        raise NotImplementedError()
+
+    def _dump_state(self):
+        """
+        Dumps the state of this object in dictionary form (can be empty). Should be implemented by subclass.
+
+        Returns:
+            dict: Keyword-mapped states of this object
+        """
+        raise NotImplementedError()
+
+    def dump_state(self, serialized=False):
+        """
+        Dumps the state of this object in either dictionary or flattened numerical form.
+
+        Args:
+            serialized (bool): If True, will return the state of this object as a 1D numpy array. Otherwise,
+                will return a (potentially nested) dictionary of states for this object
+
+        Returns:
+            dict or n-array: Either:
+                - Keyword-mapped states of this object, or
+                - encoded + serialized, 1D numerical np.array capturing this object's state, where n is
+                  @self.state_size
+        """
+        state = self._dump_state()
+        return self.serialize(state=state) if serialized else state
+
+    def _load_state(self, state):
+        """
+        Loads the internal state of this object as specified by @state. Should be implemented by subclass.
+
+        Args:
+            state (dict): Keyword-mapped states of this object to set
+        """
+        raise NotImplementedError()
+
+    def load_state(self, state, serialized=False):
+        """
+        Deserializes and loads this object's state based on @state
+
+        Args:
+            state (dict or n-array): Either:
+                - Keyword-mapped states of this object, or
+                - encoded + serialized, 1D numerical np.array capturing this object's state,
+                  where n is @self.state_size
+            serialized (bool): If True, will interpret @state as a 1D numpy array. Otherwise,
+                will assume the input is a (potentially nested) dictionary of states for this object
+        """
+        state = self.deserialize(state=state) if serialized else state
+        self._load_state(state=state)
+
+    def _serialize(self, state):
+        """
+        Serializes nested dictionary state @state into a flattened 1D numpy array for encoding efficiency.
+        Should be implemented by subclass.
+
+        Args:
+            state (dict): Keyword-mapped states of this object to encode. Should match structure of output from
+                self._dump_state()
+
+        Returns:
+            n-array: encoded + serialized, 1D numerical np.array capturing this object's state
+        """
+        raise NotImplementedError()
+
+    def serialize(self, state):
+        """
+        Serializes nested dictionary state @state into a flattened 1D numpy array for encoding efficiency.
+
+        Args:
+            state (dict): Keyword-mapped states of this object to encode. Should match structure of output from
+                self._dump_state()
+
+        Returns:
+            n-array: encoded + serialized, 1D numerical np.array capturing this object's state
+        """
+        # Simply returns self._serialize() for now. This is for future-proofing.
+        return self._serialize(state=state)
+
+    def _deserialize(self, state):
+        """
+        De-serializes flattened 1D numpy array @state into nested dictionary state.
+        Should be implemented by subclass.
+
+        Args:
+            state (n-array): encoded + serialized, 1D numerical np.array capturing this object's state
+
+        Returns:
+            2-tuple:
+                - dict: Keyword-mapped states of this object. Should match structure of output from
+                  self._dump_state()
+                - int: current index of the flattened state vector that is left off. This is helpful for subclasses
+                  that inherit partial deserializations from parent classes, and need to know where the
+                  deserialization left off before continuing.
+        """
+        raise NotImplementedError
+
+    def deserialize(self, state):
+        """
+        De-serializes flattened 1D numpy array @state into nested dictionary state.
+
+        Args:
+            state (n-array): encoded + serialized, 1D numerical np.array capturing this object's state
+
+        Returns:
+            dict: Keyword-mapped states of this object. Should match structure of output from
+                self._dump_state()
+        """
+        # Sanity check the idx with the expected state size
+        state_dict, idx = self._deserialize(state=state)
+        assert idx == self.state_size, f'Invalid state deserialization occurred! Expected {self.state_size} total ' \
+                                       f'values to be deserialized, only {idx} were.'
+
+        return state_dict
+
+
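+# Illustrative sketch (not part of the original module): a minimal Serializable subclass and
+# its dump/load round trip. Assumes numpy is importable as np.
+#
+#     class Point(Serializable):
+#         def __init__(self):
+#             self.pos = np.zeros(3)
+#
+#         @property
+#         def state_size(self):
+#             return 3
+#
+#         def _dump_state(self):
+#             return {'pos': self.pos}
+#
+#         def _load_state(self, state):
+#             self.pos = state['pos']
+#
+#         def _serialize(self, state):
+#             return np.array(state['pos'], dtype=float)
+#
+#         def _deserialize(self, state):
+#             return {'pos': state[:3]}, 3  # dict + index where parsing stopped
+#
+#     p = Point()
+#     flat = p.dump_state(serialized=True)   # 1D np.array of length state_size
+#     p.load_state(flat, serialized=True)    # round trip back through deserialize()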
+
+class SerializableNonInstance:
+    """
+    Identical to Serializable, but intended for non-instanceable classes
+    """
+
+    @ClassProperty
+    def state_size(self):
+        """
+        Returns:
+            int: Size of this class's serialized state
+        """
+        raise NotImplementedError()
+
+    @classmethod
+    def _dump_state(cls):
+        """
+        Dumps the state of this class in dictionary form (can be empty). Should be implemented by subclass.
+
+        Returns:
+            dict: Keyword-mapped states of this class
+        """
+        raise NotImplementedError()
+
+    @classmethod
+    def dump_state(cls, serialized=False):
+        """
+        Dumps the state of this class in either dictionary or flattened numerical form.
+
+        Args:
+            serialized (bool): If True, will return the state of this class as a 1D numpy array. Otherwise,
+                will return a (potentially nested) dictionary of states for this class
+
+        Returns:
+            dict or n-array: Either:
+                - Keyword-mapped states of this class, or
+                - encoded + serialized, 1D numerical np.array capturing this class's state, where n is
+                  @cls.state_size
+        """
+        state = cls._dump_state()
+        return cls.serialize(state=state) if serialized else state
+
+    @classmethod
+    def _load_state(cls, state):
+        """
+        Loads the internal state of this class as specified by @state. Should be implemented by subclass.
+
+        Args:
+            state (dict): Keyword-mapped states of this class to set
+        """
+        raise NotImplementedError()
+
+    @classmethod
+    def load_state(cls, state, serialized=False):
+        """
+        Deserializes and loads this class's state based on @state
+
+        Args:
+            state (dict or n-array): Either:
+                - Keyword-mapped states of this class, or
+                - encoded + serialized, 1D numerical np.array capturing this class's state,
+                  where n is @cls.state_size
+            serialized (bool): If True, will interpret @state as a 1D numpy array. Otherwise, will assume the input is
+                a (potentially nested) dictionary of states for this class
+        """
+        state = cls.deserialize(state=state) if serialized else state
+        cls._load_state(state=state)
+
+    @classmethod
+    def _serialize(cls, state):
+        """
+        Serializes nested dictionary state @state into a flattened 1D numpy array for encoding efficiency.
+        Should be implemented by subclass.
+
+        Args:
+            state (dict): Keyword-mapped states of this class to encode. Should match structure of output from
+                cls._dump_state()
+
+        Returns:
+            n-array: encoded + serialized, 1D numerical np.array capturing this class's state
+        """
+        raise NotImplementedError()
+
+    @classmethod
+    def serialize(cls, state):
+        """
+        Serializes nested dictionary state @state into a flattened 1D numpy array for encoding efficiency.
+
+        Args:
+            state (dict): Keyword-mapped states of this class to encode. Should match structure of output from
+                cls._dump_state()
+
+        Returns:
+            n-array: encoded + serialized, 1D numerical np.array capturing this class's state
+        """
+        # Simply returns cls._serialize() for now. This is for future-proofing.
+        return cls._serialize(state=state)
+
+    @classmethod
+    def _deserialize(cls, state):
+        """
+        De-serializes flattened 1D numpy array @state into nested dictionary state.
+        Should be implemented by subclass.
+
+        Args:
+            state (n-array): encoded + serialized, 1D numerical np.array capturing this class's state
+
+        Returns:
+            2-tuple:
+                - dict: Keyword-mapped states of this class. Should match structure of output from
+                  cls._dump_state()
+                - int: current index of the flattened state vector that is left off. This is helpful for subclasses
+                  that inherit partial deserializations from parent classes, and need to know where the
+                  deserialization left off before continuing.
+        """
+        raise NotImplementedError
+
+    @classmethod
+    def deserialize(cls, state):
+        """
+        De-serializes flattened 1D numpy array @state into nested dictionary state.
+
+        Args:
+            state (n-array): encoded + serialized, 1D numerical np.array capturing this class's state
+
+        Returns:
+            dict: Keyword-mapped states of this class. Should match structure of output from
+                cls._dump_state()
+        """
+        # Sanity check the idx with the expected state size
+        state_dict, idx = cls._deserialize(state=state)
+        assert idx == cls.state_size, f'Invalid state deserialization occurred! Expected {cls.state_size} total ' \
+                                      f'values to be deserialized, only {idx} were.'
+
+        return state_dict
+
+
+
+class Wrapper:
+    """
+    Base class for all wrappers in OmniGibson
+
+    Args:
+        obj (any): Arbitrary python object instance to wrap
+    """
+
+    def __init__(self, obj):
+        # Set the internal attributes -- store wrapped obj
+        self.wrapped_obj = obj
+
+    @classmethod
+    def class_name(cls):
+        return cls.__name__
+
+    def _warn_double_wrap(self):
+        """
+        Utility function that checks if we're accidentally trying to double wrap an object
+
+        Raises:
+            Exception: [Double wrapping object]
+        """
+        obj = self.wrapped_obj
+        while True:
+            if isinstance(obj, Wrapper):
+                if obj.class_name() == self.class_name():
+                    raise Exception('Attempted to double wrap with Wrapper: {}'.format(self.__class__.__name__))
+                obj = obj.wrapped_obj
+            else:
+                break
+
+    @property
+    def unwrapped(self):
+        """
+        Grabs the unwrapped object
+
+        Returns:
+            any: The unwrapped object instance
+        """
+        return self.wrapped_obj.unwrapped if hasattr(self.wrapped_obj, 'unwrapped') else self.wrapped_obj
+
+    # this method is a fallback option on any methods the original object might support
+    def __getattr__(self, attr):
+        # If we're querying wrapped_obj, raise an error
+        if attr == 'wrapped_obj':
+            raise AttributeError('wrapped_obj attribute not initialized yet!')
+
+        # Sanity check to make sure wrapped obj is not None -- if so, raise error
+        assert self.wrapped_obj is not None, f'Cannot access attribute {attr} since wrapped_obj is None!'
+
+        # using getattr ensures that both __getattribute__ and __getattr__ (fallback) get called
+        # (see https://stackoverflow.com/questions/3278077/difference-between-getattr-vs-getattribute)
+        orig_attr = getattr(self.wrapped_obj, attr)
+        if callable(orig_attr):
+
+            def hooked(*args, **kwargs):
+                result = orig_attr(*args, **kwargs)
+                # prevent wrapped_class from becoming unwrapped
+                if id(result) == id(self.wrapped_obj):
+                    return self
+                return result
+
+            return hooked
+        else:
+            return orig_attr
+
+    def __setattr__(self, key, value):
+        # Call setattr on wrapped obj if it has the attribute, otherwise, operate on this object
+        if hasattr(self, 'wrapped_obj') and self.wrapped_obj is not None and hasattr(self.wrapped_obj, key):
+            setattr(self.wrapped_obj, key, value)
+        else:
+            super().__setattr__(key, value)
+
+
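+# Illustrative sketch (not part of the original module): attribute and method calls pass
+# through to the wrapped object; methods returning the wrapped object return the wrapper
+# instead, so chained calls stay wrapped. `Counter` is a hypothetical name for this example.
+#
+#     class Counter:
+#         def __init__(self):
+#             self.count = 0
+#
+#         def bump(self):
+#             self.count += 1
+#             return self  # returning self would normally "unwrap"
+#
+#     wrapped = Wrapper(Counter())
+#     assert wrapped.bump() is wrapped  # hooked: stays wrapped
+#     assert wrapped.count == 1         # attribute access passes through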
+
+def clear():
+    """
+    Clear state tied to singleton classes
+    """
+    NAMES.clear()
+    CLASS_NAMES.clear()
+# Copyright (c) 2022-2024, The ORBIT Project Developers.
+# All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+# yapf: disable
+
+"""Submodule containing utilities for transforming strings and regular expressions."""
+
+import ast
+import importlib
+import inspect
+import re
+from collections.abc import Callable, Sequence
+from typing import Any
+
+"""
+String formatting.
+"""
+
+
+
+def to_camel_case(snake_str: str, to: str = 'cC') -> str:
+    """Converts a string from snake case to camel case.
+
+    Args:
+        snake_str: A string in snake case (i.e. with '_')
+        to: Convention to convert string to. Defaults to "cC".
+
+    Raises:
+        ValueError: Invalid input argument `to`, i.e. not "cC" or "CC".
+
+    Returns:
+        A string in camel-case format.
+    """
+    # check input is correct
+    if to not in ['cC', 'CC']:
+        msg = 'to_camel_case(): Choose a valid `to` argument (CC or cC)'
+        raise ValueError(msg)
+    # convert string to lower case and split
+    components = snake_str.lower().split('_')
+    if to == 'cC':
+        # We capitalize the first letter of each component except the first one
+        # with the 'title' method and join them together.
+        return components[0] + ''.join(x.title() for x in components[1:])
+    else:
+        # Capitalize first letter in all the components
+        return ''.join(x.title() for x in components)
+
+
+
+def to_snake_case(camel_str: str) -> str:
+    """Converts a string from camel case to snake case.
+
+    Args:
+        camel_str: A string in camel case.
+
+    Returns:
+        A string in snake case (i.e. with '_')
+    """
+    camel_str = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', camel_str)
+    return re.sub('([a-z0-9])([A-Z])', r'\1_\2', camel_str).lower()
+
+
+def is_lambda_expression(name: str) -> bool:
+    """Checks if the input string is a lambda expression.
+
+    Args:
+        name: The input string.
+
+    Returns:
+        Whether the input string is a lambda expression.
+    """
+    try:
+        ast.parse(name)
+        return isinstance(ast.parse(name).body[0], ast.Expr) and isinstance(ast.parse(name).body[0].value, ast.Lambda)
+    except SyntaxError:
+        return False
+
+
+
+def callable_to_string(value: Callable) -> str:
+    """Converts a callable object to a string.
+
+    Args:
+        value: A callable object.
+
+    Raises:
+        ValueError: When the input argument is not a callable object.
+
+    Returns:
+        A string representation of the callable object.
+    """
+    # check if callable
+    if not callable(value):
+        raise ValueError(f'The input argument is not callable: {value}.')
+    # check if lambda function
+    if value.__name__ == '<lambda>':
+        return f"lambda {inspect.getsourcelines(value)[0][0].strip().split('lambda')[1].strip().split(',')[0]}"
+    else:
+        # get the module and function name
+        module_name = value.__module__
+        function_name = value.__name__
+        # return the string
+        return f'{module_name}:{function_name}'
+
+
+
+def string_to_callable(name: str) -> Callable:
+    """Resolves the module and function names to return the function.
+
+    Args:
+        name: The function name. The format should be 'module:attribute_name' or a
+            lambda expression of format: 'lambda x: x'.
+
+    Raises:
+        ValueError: When the resolved attribute is not a function.
+        ValueError: When the module cannot be found.
+
+    Returns:
+        Callable: The function loaded from the module.
+    """
+    try:
+        if is_lambda_expression(name):
+            callable_object = eval(name)
+        else:
+            mod_name, attr_name = name.split(':')
+            mod = importlib.import_module(mod_name)
+            callable_object = getattr(mod, attr_name)
+        # check if attribute is callable
+        if callable(callable_object):
+            return callable_object
+        else:
+            raise AttributeError(f"The imported object is not callable: '{name}'")
+    except (ValueError, ModuleNotFoundError) as e:
+        msg = (
+            f"Could not resolve the input string '{name}' into callable object."
+            " The format of input should be 'module:attribute_name'.\n"
+            f'Received the error:\n{e}.'
+        )
+        raise ValueError(msg)
+
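+# Illustrative sketch (not part of the original module): resolving callables from strings.
+#
+#     fn = string_to_callable('math:sqrt')        # import math; return math.sqrt
+#     sq = string_to_callable('lambda x: x * x')  # lambda expressions are eval'd
+#     assert callable_to_string(fn) == 'math:sqrt'
+#     assert fn(4.0) == 2.0 and sq(3) == 9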
+
+"""
+Regex operations.
+"""
+
+
+
+def resolve_matching_names(
+    keys: str | Sequence[str], list_of_strings: Sequence[str], preserve_order: bool = False
+) -> tuple[list[int], list[str]]:
+    """Match a list of query regular expressions against a list of strings and return the matched indices and names.
+
+    When a list of query regular expressions is provided, the function checks each target string against each
+    query regular expression and returns the indices of the matched strings and the matched strings.
+
+    If :attr:`preserve_order` is True, the ordering of the matched indices and names follows the order of the
+    query regular expressions. This means that the ordering is dictated by the order of the query keys and not
+    the order of the target strings.
+
+    If :attr:`preserve_order` is False, the ordering of the matched indices and names follows the order of the
+    provided list of target strings.
+
+    For example, consider the list of strings ['a', 'b', 'c', 'd', 'e'] and the regular expressions ['a|c', 'b'].
+    If :attr:`preserve_order` is False, then the function will return the indices of the matched strings and the
+    strings as: ([0, 1, 2], ['a', 'b', 'c']). When :attr:`preserve_order` is True, it will return them as:
+    ([0, 2, 1], ['a', 'c', 'b']).
+
+    Note:
+        The function does not sort the indices. It returns the indices in the order they are found.
+
+    Args:
+        keys: A regular expression or a list of regular expressions to match the strings in the list.
+        list_of_strings: A list of strings to match.
+        preserve_order: Whether to preserve the order of the query keys in the returned values. Defaults to False.
+
+    Returns:
+        A tuple of lists containing the matched indices and names.
+
+    Raises:
+        ValueError: When multiple matches are found for a string in the list.
+        ValueError: When not all regular expressions are matched.
+    """
+    # resolve name keys
+    if isinstance(keys, str):
+        keys = [keys]
+    # find matching patterns
+    index_list = []
+    names_list = []
+    key_idx_list = []
+    # book-keeping to check that we always have a one-to-one mapping
+    # i.e. each target string should match only one regular expression
+    target_strings_match_found = [None for _ in range(len(list_of_strings))]
+    keys_match_found = [[] for _ in range(len(keys))]
+    # loop over all target strings
+    for target_index, potential_match_string in enumerate(list_of_strings):
+        for key_index, re_key in enumerate(keys):
+            if re.fullmatch(re_key, potential_match_string):
+                # check if match already found
+                if target_strings_match_found[target_index]:
+                    raise ValueError(
+                        f"Multiple matches for '{potential_match_string}':"
+                        f" '{target_strings_match_found[target_index]}' and '{re_key}'!"
+                    )
+                # add to list
+                target_strings_match_found[target_index] = re_key
+                index_list.append(target_index)
+                names_list.append(potential_match_string)
+                key_idx_list.append(key_index)
+                # add for regex key
+                keys_match_found[key_index].append(potential_match_string)
+    # reorder keys if they should be returned in order of the query keys
+    if preserve_order:
+        reordered_index_list = [None] * len(index_list)
+        global_index = 0
+        for key_index in range(len(keys)):
+            for key_idx_position, key_idx_entry in enumerate(key_idx_list):
+                if key_idx_entry == key_index:
+                    reordered_index_list[key_idx_position] = global_index
+                    global_index += 1
+        # reorder index and names list
+        index_list_reorder = [None] * len(index_list)
+        names_list_reorder = [None] * len(index_list)
+        for idx, reorder_idx in enumerate(reordered_index_list):
+            index_list_reorder[reorder_idx] = index_list[idx]
+            names_list_reorder[reorder_idx] = names_list[idx]
+        # update
+        index_list = index_list_reorder
+        names_list = names_list_reorder
+    # check that all regular expressions are matched
+    if not all(keys_match_found):
+        # make this print nicely aligned for debugging
+        msg = '\n'
+        for key, value in zip(keys, keys_match_found):
+            msg += f'\t{key}: {value}\n'
+        msg += f'Available strings: {list_of_strings}\n'
+        # raise error
+        raise ValueError(
+            f'Not all regular expressions are matched! Please check that the regular expressions are correct: {msg}'
+        )
+    # return
+    return index_list, names_list
+
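+# Illustrative sketch (not part of the original module): the ordering semantics described
+# in the docstring above.
+#
+#     names = ['a', 'b', 'c', 'd', 'e']
+#     resolve_matching_names(['a|c', 'b'], names)
+#     # -> ([0, 1, 2], ['a', 'b', 'c'])        target-string order
+#     resolve_matching_names(['a|c', 'b'], names, preserve_order=True)
+#     # -> ([0, 2, 1], ['a', 'c', 'b'])        query-key order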
+
+
+def resolve_matching_names_values(
+    data: dict[str, Any], list_of_strings: Sequence[str], preserve_order: bool = False
+) -> tuple[list[int], list[str], list[Any]]:
+    """Match a list of regular expressions in a dictionary against a list of strings and return
+    the matched indices, names, and values.
+
+    If :attr:`preserve_order` is True, the ordering of the matched indices and names follows the order of the
+    query regular expressions (the dictionary keys). This means that the ordering is dictated by the order of
+    the query keys and not the order of the target strings.
+
+    If :attr:`preserve_order` is False, the ordering of the matched indices and names follows the order of the
+    provided list of target strings.
+
+    For example, consider the dictionary {"a|d|e": 1, "b|c": 2} and the list of strings ['a', 'b', 'c', 'd', 'e'].
+    If :attr:`preserve_order` is False, then the function will return the indices of the matched strings, the
+    matched strings, and the values as: ([0, 1, 2, 3, 4], ['a', 'b', 'c', 'd', 'e'], [1, 2, 2, 1, 1]). When
+    :attr:`preserve_order` is True, it will return them as: ([0, 3, 4, 1, 2], ['a', 'd', 'e', 'b', 'c'],
+    [1, 1, 1, 2, 2]).
+
+    Args:
+        data: A dictionary of regular expressions and values to match the strings in the list.
+        list_of_strings: A list of strings to match.
+        preserve_order: Whether to preserve the order of the query keys in the returned values. Defaults to False.
+
+    Returns:
+        A tuple of lists containing the matched indices, names, and values.
+
+    Raises:
+        TypeError: When the input argument :attr:`data` is not a dictionary.
+        ValueError: When multiple matches are found for a string in the dictionary.
+        ValueError: When not all regular expressions in the data keys are matched.
+    """
+    # check valid input
+    if not isinstance(data, dict):
+        raise TypeError(f'Input argument `data` should be a dictionary. Received: {data}')
+    # find matching patterns
+    index_list = []
+    names_list = []
+    values_list = []
+    key_idx_list = []
+    # book-keeping to check that we always have a one-to-one mapping
+    # i.e. each target string should match only one regular expression
+    target_strings_match_found = [None for _ in range(len(list_of_strings))]
+    keys_match_found = [[] for _ in range(len(data))]
+    # loop over all target strings
+    for target_index, potential_match_string in enumerate(list_of_strings):
+        for key_index, (re_key, value) in enumerate(data.items()):
+            if re.fullmatch(re_key, potential_match_string):
+                # check if match already found
+                if target_strings_match_found[target_index]:
+                    raise ValueError(
+                        f"Multiple matches for '{potential_match_string}':"
+                        f" '{target_strings_match_found[target_index]}' and '{re_key}'!"
+                    )
+                # add to list
+                target_strings_match_found[target_index] = re_key
+                index_list.append(target_index)
+                names_list.append(potential_match_string)
+                values_list.append(value)
+                key_idx_list.append(key_index)
+                # add for regex key
+                keys_match_found[key_index].append(potential_match_string)
+    # reorder keys if they should be returned in order of the query keys
+    if preserve_order:
+        reordered_index_list = [None] * len(index_list)
+        global_index = 0
+        for key_index in range(len(data)):
+            for key_idx_position, key_idx_entry in enumerate(key_idx_list):
+                if key_idx_entry == key_index:
+                    reordered_index_list[key_idx_position] = global_index
+                    global_index += 1
+        # reorder index, names, and values lists
+        index_list_reorder = [None] * len(index_list)
+        names_list_reorder = [None] * len(index_list)
+        values_list_reorder = [None] * len(index_list)
+        for idx, reorder_idx in enumerate(reordered_index_list):
+            index_list_reorder[reorder_idx] = index_list[idx]
+            names_list_reorder[reorder_idx] = names_list[idx]
+            values_list_reorder[reorder_idx] = values_list[idx]
+        # update
+        index_list = index_list_reorder
+        names_list = names_list_reorder
+        values_list = values_list_reorder
+    # check that all regular expressions are matched
+    if not all(keys_match_found):
+        # make this print nicely aligned for debugging
+        msg = '\n'
+        for key, value in zip(data.keys(), keys_match_found):
+            msg += f'\t{key}: {value}\n'
+        msg += f'Available strings: {list_of_strings}\n'
+        # raise error
+        raise ValueError(
+            f'Not all regular expressions are matched! Please check that the regular expressions are correct: {msg}'
+        )
+    # return
+    return index_list, names_list, values_list
+from typing import Any, Dict, List, Optional
+
+from pydantic import BaseModel
+
+
+class MetaActionData(BaseModel):
+    """
+    action status in tao_yuan
+    """
+    controller: str
+    data: Any
+
+
+class ActionData(BaseModel):
+    """
+    action status in tao_yuan
+    """
+    robot: str
+    controllers: List[MetaActionData]
+
+
+class _IsaacData(BaseModel):
+    """
+    isaac status in tao_yuan
+    """
+    actions: Optional[List[Dict[str, Any]]]
+    obs: Optional[List[Dict[str, Any]]]
+
+
+
+class IsaacData:
+    """
+    isaac status in tao_yuan
+
+    There are two types of isaac status:
+
+    * Action
+    * Observation
+
+    The structure of isaac status looks like this::
+
+        {
+            actions: {
+                [
+                    {
+                        robot_1: {
+                            cap: param,
+                        }
+                    }
+                ]
+            },
+            observations: {
+                [
+                    {
+                        robot_1: {
+                            obs_1: data,
+                            obs_2: data
+                        }
+                    }
+                ]
+            }
+        }
+
+    """
+    data = _IsaacData(actions=[], obs=[])
+
+    def __init__(self) -> None:
+        pass
+
+    @classmethod
+    def get_all(cls) -> _IsaacData:
+        return cls.data
+
+    # Observation
+    @classmethod
+    def set_obs_data(cls, obs: List[Dict[str, Any]]) -> None:
+        cls.data.obs = obs
+
+    @classmethod
+    def get_obs(cls) -> List[Dict[str, Any]]:
+        """
+        Get isaac observation data
+
+        Returns:
+            isaac observation data list
+        """
+        return cls.data.obs
+
+    @classmethod
+    def get_obs_by_id(cls, task_id: int) -> Dict[str, Any]:
+        """
+        Get isaac observation by id
+
+        Args:
+            task_id: isaac task id
+
+        Returns:
+            isaac observation data
+        """
+        return cls.data.obs[task_id]
+"""
+Includes web api endpoints
+"""
+fromtypingimportAny,Dict,List
+
+importhttpx
+
+fromtao_yuan.core.datahub.isaac_dataimportActionData
+
+# constants
+WebBEUrl='http://127.0.0.1:9000'# TODO config this
+GetAllObsPath=WebBEUrl+'/api/stream/get_all_obs'
+GetObsByIdPath=WebBEUrl+'/api/stream/get_obs_by_id/'
+FlushObsUrl=WebBEUrl+'/api/isaac/flush_obs_data'
+SetActionsUrl=WebBEUrl+'/api/isaac/set_action'
+GetAllActionUrl=WebBEUrl+'/api/isaac/get_actions'
+GetActionByIdUrl=WebBEUrl+'/api/isaac/get_action_by_id/'
+
+
+
+def get_all_obs() -> List[Dict[str, Any]] | None:
+    """
+    Get all observation data
+
+    Returns:
+        obs (List[Dict[str, Any]]): List of all observation data
+    """
+    r = httpx.get(GetAllObsPath)
+    if r.status_code == 200:
+        return r.json()
+    return None
+
+
+
+def get_obs_by_id(task_id: int) -> Any | None:
+    """
+    Get observation by id
+
+    Args:
+        task_id (int): id of observation data
+
+    Returns:
+        obs (Any): Observation data
+    """
+    r = httpx.get(GetObsByIdPath + str(task_id))
+    if r.status_code == 200:
+        return r.json()
+    return None
+
+
+
+def set_obs_data(obs: List[Dict[str, Any]]) -> bool:
+    """
+    Set observation data via web API
+
+    Args:
+        obs (List[Dict[str, Any]]): isaac observation data
+
+    Returns:
+        bool: True if set successfully
+    """
+    r = httpx.post(FlushObsUrl, json=obs, timeout=1)
+    if r.status_code == 200 and r.json()['msg'] == 'OK':
+        return True
+    return False
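+# Illustrative sketch (not part of the original module): a typical flush/read cycle against
+# the local datahub backend, assuming it is serving at WebBEUrl.
+#
+#     obs = [{'robot_1': {'position': [0.0, 0.0, 0.0]}}]
+#     if set_obs_data(obs):            # POST to /api/isaac/flush_obs_data
+#         latest = get_obs_by_id(0)    # GET /api/stream/get_obs_by_id/0
+#         everything = get_all_obs()   # GET /api/stream/get_all_obs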
+
+
+def send_chain_of_thought(cot: str, uuid: str = 'none') -> None:
+    """
+    Send chain of thought data
+
+    Args:
+        uuid (str): uuid of chain of thought data, defaults to "none".
+        cot (str): chain of thought data.
+    """
+
+    def cot_format(x):
+        return {'type': 'text', 'value': x}
+
+    res_data = [{'type': 'time', 'value': datetime.datetime.now().strftime('%H:%M')}]
+    # note: if `cot` is a plain string, this iterates over its characters
+    for i in cot:
+        res_data.append(cot_format(i))
+    AsyncRequest.post(uuid, SendCOTUrl, res_data)
+
+
+
+def send_chat_control(nickname: str, text: str, img: str = None, role: str = 'user', uuid: str = 'none') -> None:
+    """Send a new message to the chatbox.
+
+    Args:
+        nickname (str): nickname displayed in the chatbox.
+        text (str): text to send to the chatbox.
+        img (str, optional): image to send to the chatbox. Defaults to None.
+        role (str, optional): role name, user or agent. Defaults to "user".
+        uuid (str, optional): uuid of the message. Defaults to 'none'.
+    """
+    avatar_url = AvatarUrls.get(role, DefaultAvatarUrl)
+    res_data = {
+        'type': role,
+        'name': nickname,
+        'time': datetime.datetime.now().strftime('%H:%M'),
+        'message': text,
+        'photo': avatar_url,
+        'img': img,
+    }
+    AsyncRequest.post(uuid, SendChatControlUrl, res_data)
+
+
+    def step(self, actions: List[Dict[str, Any]]) -> List[Dict[str, Any]]:
+        """
+        Run a simulation step with the given actions (includes an isaac step).
+
+        Args:
+            actions (List[Dict[str, Any]]): actions, one per task
+
+        Returns:
+            List[Dict[str, Any]]: observations (after the isaac step)
+        """
+        if len(actions) != len(self.config.tasks):
+            raise AssertionError('len of action list is not equal to len of task list')
+        _actions = []
+        for action_idx, action in enumerate(actions):
+            _action = {}
+            for k, v in action.items():
+                _action[f'{k}_{action_idx}'] = v
+            _actions.append(_action)
+        action_after_reshape = {
+            self.config.tasks[action_idx].name: action
+            for action_idx, action in enumerate(_actions)
+        }
+
+        # log.debug(action_after_reshape)
+        self._runner.step(action_after_reshape)
+        observations = self.get_observations()
+        return observations
+
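+    # Illustrative sketch (not part of the original module): what the reshaping above does
+    # for two tasks. Keys are suffixed with the task index and grouped under the task name.
+    #
+    #     actions = [{'move': [0.1, 0.0]}, {'move': [0.0, 0.2]}]
+    #     # becomes:
+    #     # {'task_0': {'move_0': [0.1, 0.0]}, 'task_1': {'move_1': [0.0, 0.2]}}
+    #     # (assuming self.config.tasks[i].name == f'task_{i}')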
+
+    def reset(self, envs: List[int] = None):
+        """
+        Reset the environment (uses isaac world reset).
+
+        Args:
+            envs (List[int]): envs to reset (defaults to resetting all envs)
+        """
+        if envs is not None:
+            if len(envs) == 0:
+                return
+            log.debug(f'============= reset: {envs} ==============')
+            # int -> name
+            self._runner.reset([self.config.tasks[e].name for e in envs])
+            return self.get_observations(), {}
+        self._runner.reset()
+        return self.get_observations(), {}
+
+
+    def get_observations(self) -> List[Dict[str, Any]]:
+        """
+        Get observations from the Isaac environment.
+
+        Returns:
+            List[Dict[str, Any]]: observations
+        """
+        _obs = self._runner.get_obs()
+        return _obs
+
+    def render(self, mode='human'):
+        return
+
+    def close(self):
+        """close the environment"""
+        self._simulation_app.close()
+        return
+
+
+    def get_obs(self) -> dict:
+        """Get observation of robot, including controllers, sensors, and world pose.
+
+        Raises:
+            NotImplementedError: this must be overridden.
+        """
+        raise NotImplementedError()
+
+    def get_robot_ik_base(self) -> RigidPrim:
+        """Get base link of ik controlled parts.
+
+        Returns:
+            RigidPrim: rigid prim of ik base link.
+        """
+        raise NotImplementedError()
+
+    def get_robot_base(self) -> RigidPrim:
+        """
+        Get base link of robot.
+
+        Returns:
+            RigidPrim: rigid prim of robot base link.
+        """
+        raise NotImplementedError()
+
+    def get_robot_scale(self) -> np.ndarray:
+        """Get robot scale.
+
+        Returns:
+            np.ndarray: robot scale in (x, y, z).
+        """
+        return self.isaac_robot.get_local_scale()
+
+
+class BaseSensor(ABC):
+    """Base class of sensor."""
+    sensors = {}
+
+    def __init__(self, config: SensorModel, robot: BaseRobot, scene: Scene):
+        """Initialize the sensor.
+
+        Args:
+            config (SensorModel): merged config (from user config and robot model) of the sensor.
+            robot (BaseRobot): robot owning the sensor.
+            scene (Scene): scene from isaac sim.
+        """
+        if config.name is None:
+            raise ValueError('must specify sensor name.')
+        self.name = config.name
+        self.config = config
+        self._scene = scene
+        self._robot = robot
+
+    @abstractmethod
+    def sensor_init(self):
+        raise NotImplementedError()
+
+    @abstractmethod
+    def get_data(self) -> Dict:
+        """Get data from sensor.
+
+        Returns:
+            Dict: data dict of sensor.
+        """
+        raise NotImplementedError()
+
+    @classmethod
+    def register(cls, name: str):
+        """
+        Register a sensor class with the given name (decorator).
+
+        Args:
+            name (str): name of the sensor class.
+        """
+
+        def decorator(sensor_class):
+            cls.sensors[name] = sensor_class
+
+            @wraps(sensor_class)
+            def wrapped_function(*args, **kwargs):
+                return sensor_class(*args, **kwargs)
+
+            return wrapped_function
+
+        return decorator
+
+
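+# Illustrative sketch (not part of the original module): registering a custom sensor type so
+# create_sensors() below can instantiate it by its config `type` string. `DummyCamera` is a
+# hypothetical name for this example.
+#
+#     @BaseSensor.register('DummyCamera')
+#     class DummyCamera(BaseSensor):
+#         def sensor_init(self):
+#             pass
+#
+#         def get_data(self) -> Dict:
+#             return {'rgba': None}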
+
+def config_inject(params: SensorParams, model: SensorModel) -> SensorModel:
+    """Merge sensor config from user config and robot model.
+
+    Args:
+        params (SensorParams): user config.
+        model (SensorModel): sensor config from robot model.
+
+    Returns:
+        SensorModel: merged sensor config.
+    """
+    if params is None:
+        return model
+    config = model.dict()
+    user = params.dict()
+    for k, v in user.items():
+        if v is not None:
+            config[k] = v
+    conf = SensorModel(**config)
+
+    return conf
+
+
+
+def create_sensors(config: RobotUserConfig, robot_model: RobotModel, robot: BaseRobot,
+                   scene: Scene) -> Dict[str, BaseSensor]:
+    """Create all sensors of one robot.
+
+    Args:
+        config (RobotUserConfig): user config of the robot.
+        robot_model (RobotModel): model of the robot.
+        robot (BaseRobot): robot instance.
+        scene (Scene): scene from isaac sim.
+
+    Returns:
+        Dict[str, BaseSensor]: dict of sensors with sensor name as key.
+    """
+    sensor_map = {}
+    if robot_model.sensors is not None:
+        available_sensors = {a.name: a for a in robot_model.sensors}
+        for sensor_name, sensor in available_sensors.items():
+            if sensor.type not in BaseSensor.sensors:
+                raise KeyError(f'unknown sensor type "{sensor.type}"')
+            sensor_cls = BaseSensor.sensors[sensor.type]
+            # Find if user param exists for this sensor.
+            param = None
+            if config.sensor_params is not None:
+                for p in config.sensor_params:
+                    if p.name == sensor_name:
+                        param = p
+                        break
+
+            sensor_ins = sensor_cls(config=config_inject(param, sensor), robot=robot, name=sensor_name, scene=scene)
+            sensor_map[sensor_name] = sensor_ins
+            sensor_ins.sensor_init()
+            log.debug(f'==================== {sensor_name} loaded ==========================')
+
+    return sensor_map
+
+
+def add_usd_ref(source_stage: Usd.Stage, dest_stage: Usd.Stage, src_prim_path: str, dest_prim_path: str) -> None:
+    """
+    Add an opened usd into another usd as a reference, placed at dest_prim_path.
+
+    Args:
+        source_stage (Usd.Stage): source stage
+        dest_stage (Usd.Stage): dest stage
+        src_prim_path (str): source prim path
+        dest_prim_path (str): dest prim path
+    """
+    src_root_layer = source_stage.GetRootLayer()
+    log.debug(src_root_layer.identifier)
+    source_prim = source_stage.GetPrimAtPath(src_prim_path)
+    dest_prim = dest_stage.DefinePrim(dest_prim_path, source_prim.GetTypeName())
+    dest_prim.GetReferences().AddReference(src_root_layer.identifier)
+    dest_stage.GetRootLayer().Save()
+
+
+
+def get_local_transform_xform(prim: Usd.Prim) -> typing.Tuple[Gf.Vec3d, Gf.Rotation, Gf.Vec3d]:
+    """
+    Get the local transformation of a prim using Xformable.
+
+    Args:
+        prim: The prim to calculate the local transformation.
+
+    Returns:
+        A tuple of:
+        - Translation vector.
+        - Rotation quaternion, i.e. 3d vector plus angle.
+        - Scale vector.
+    """
+    xform = UsdGeom.Xformable(prim)
+    local_transformation: Gf.Matrix4d = xform.GetLocalTransformation()
+    translation: Gf.Vec3d = local_transformation.ExtractTranslation()
+    rotation: Gf.Rotation = local_transformation.ExtractRotation()
+    scale: Gf.Vec3d = Gf.Vec3d(*(v.GetLength() for v in local_transformation.ExtractRotationMatrix()))
+    return translation, rotation, scale
+
+
+def get_world_transform_xform(prim: Usd.Prim) -> typing.Tuple[Gf.Vec3d, Gf.Rotation, Gf.Vec3d]:
+    """
+    Get the world transformation of a prim using Xformable.
+
+    Args:
+        prim: The prim to calculate the world transformation.
+
+    Returns:
+        A tuple of:
+        - Translation vector.
+        - Rotation quaternion, i.e. 3d vector plus angle.
+        - Scale vector.
+    """
+    xform = UsdGeom.Xformable(prim)
+    time = Usd.TimeCode.Default()
+    world_transform: Gf.Matrix4d = xform.ComputeLocalToWorldTransform(time)
+    translation: Gf.Vec3d = world_transform.ExtractTranslation()
+    rotation: Gf.Rotation = world_transform.ExtractRotation()
+    scale: Gf.Vec3d = Gf.Vec3d(*(v.GetLength() for v in world_transform.ExtractRotationMatrix()))
+    return translation, rotation, scale
+
+
+
+def create_new_usd(new_usd_path: str, default_prim_name: str, default_axis: str = None) -> Usd.Stage:
+    """
+    Create a new usd.
+
+    Args:
+        new_usd_path (str): where to place this new usd
+        default_prim_name (str): default prim name (root prim path)
+        default_axis (str): default axis for new usd
+    """
+    stage: Usd.Stage = Usd.Stage.CreateNew(new_usd_path)
+    default_prim: Usd.Prim = UsdGeom.Xform.Define(stage, Sdf.Path('/' + default_prim_name)).GetPrim()
+    _set_default_prim(stage, default_prim)
+    _set_up_axis(stage, default_axis)
+    stage.GetRootLayer().Save()
+    return stage
+
+
+def _set_up_axis(stage: Usd.Stage, axis_str: str = None) -> None:
+    """
+    Set default axis for a stage.
+
+    Args:
+        stage (Usd.Stage): target stage
+        axis_str (str, optional): axis str, 'y' or 'z'; set 'z' if None. Defaults to None.
+    """
+    if axis_str == 'y' or axis_str == 'Y':
+        axis: UsdGeom.Tokens = UsdGeom.Tokens.y
+    else:
+        axis: UsdGeom.Tokens = UsdGeom.Tokens.z
+    UsdGeom.SetStageUpAxis(stage, axis)
+
+
+def _set_default_prim(stage: Usd.Stage, prim: Usd.Prim) -> None:
+    """
+    Set default prim for a stage.
+
+    Args:
+        stage (Usd.Stage): target stage
+        prim (Usd.Prim): prim in this stage
+    """
+    stage.SetDefaultPrim(prim)
+
+
+
+def compute_bbox(prim: Usd.Prim) -> Gf.Range3d:
+    """
+    Compute Bounding Box using ComputeWorldBound at UsdGeom.Imageable.
+
+    Args:
+        prim: A prim to compute the bounding box.
+
+    Returns:
+        A range (i.e. bounding box)
+    """
+    imageable: UsdGeom.Imageable = UsdGeom.Imageable(prim)
+    time = Usd.TimeCode.Default()
+    bound = imageable.ComputeWorldBound(time, UsdGeom.Tokens.default_)
+    bound_range = bound.ComputeAlignedBox()
+    return bound_range
+
+
+
+def delete_prim_in_stage(stage: Usd.Stage, prim: Usd.Prim) -> None:
+    """
+    Delete a prim in stage.
+
+    Args:
+        stage (Usd.Stage): target stage
+        prim (Usd.Prim): prim to be deleted
+    """
+    stage.RemovePrim(prim.GetPrimPath())
+
+
+
+def set_xform_of_prim(prim: Usd.Prim, xform_op: str, set_valve: typing.Any) -> None:
+    """
+    Set xform data of a prim with new data.
+
+    Args:
+        prim (Usd.Prim): target prim
+        xform_op (str): which op to be set
+        set_valve (typing.Any): new data to be set, could be np.array
+    """
+    stage = prim.GetStage()
+    op_list = prim.GetAttribute('xformOpOrder').Get()
+    s = None
+    for i in op_list:
+        if xform_op == i:
+            log.debug(prim.GetAttribute(i))
+            s = prim.GetAttribute(i)
+    trans = s.Get()
+    trans_value = set_valve
+    data_class = type(trans)
+    time_code = Usd.TimeCode.Default()
+    new_data = data_class(*trans_value)
+    s.Set(new_data, time_code)
+    stage.Save()
+
+
+
+def delete_xform_of_prim(prim: Usd.Prim, xform_op: str) -> None:
+    """
+    Delete xform data of a prim.
+
+    Args:
+        prim (Usd.Prim): target prim
+        xform_op (str): which op to be deleted
+    """
+    stage = prim.GetStage()
+    if prim.HasAttribute(xform_op):
+        # Clear the attribute from the Prim
+        prim.GetAttribute(xform_op).Clear()
+    stage.Save()
+
+
+
+def add_xform_of_prim(prim: Usd.Prim, xform_op: str, set_valve: typing.Any) -> None:
+    """
+    Add xform data to a prim with new data.
+
+    Args:
+        prim (Usd.Prim): target prim
+        xform_op (str): which op to be set
+        set_valve (typing.Any): new data to be set, could be Gf.Vec3d, Gf.Rotation
+    """
+    stage = prim.GetStage()
+    attribute_name = xform_op
+    attribute_value = set_valve
+    opType = get_xformop_type(xform_op)
+    precision = get_xformop_precision('float')
+    attribute = UsdGeom.Xformable(prim).AddXformOp(opType, precision)
+    if attribute:
+        attribute.Set(attribute_value)
+        # log.debug(f"Attribute {attribute_name} has been set to {attribute_value}.")
+    else:
+        log.debug(f'Failed to create attribute named {attribute_name}.')
+    stage.Save()
+
+
+
+def add_xform_of_prim_old(prim: Usd.Prim, xform_op: str, set_valve: typing.Any) -> None:
+    """
+    Add xform data to a prim with new data (legacy version).
+
+    Args:
+        prim (Usd.Prim): target prim
+        xform_op (str): which op to be set
+        set_valve (typing.Any): new data to be set, could be Gf.Vec3d, Gf.Rotation
+    """
+    stage = prim.GetStage()
+    attribute_name = xform_op
+    attribute_value = set_valve
+    if '3' in type(set_valve).__name__:
+        attribute_type = Sdf.ValueTypeNames.Float3
+    else:
+        attribute_type = Sdf.ValueTypeNames.Float
+    attribute = prim.CreateAttribute(attribute_name, attribute_type)
+    if attribute:
+        attribute.Set(attribute_value)
+        # log.debug(f"Attribute {attribute_name} has been set to {attribute_value}.")
+    else:
+        log.debug(f'Failed to create attribute named {attribute_name}.')
+    stage.Save()
+
+
+    @abstractmethod
+    def is_done(self) -> bool:
+        """
+        Returns True if the task is done.
+
+        Raises:
+            NotImplementedError: this must be overridden.
+        """
+        raise NotImplementedError
+
+    def individual_reset(self):
+        """
+        Reload this task individually without reloading the whole world.
+        """
+        raise NotImplementedError
+
+    def post_reset(self) -> None:
+        """Called while doing a .reset() on the world."""
+        self.steps = 0
+        for robot in self.robots.values():
+            robot.post_reset()
+        return
+
+    @classmethod
+    def register(cls, name: str):
+        """
+        Register a task with its name (decorator).
+
+        Args:
+            name (str): name of the task
+        """
+
+        def decorator(tasks_class):
+            cls.tasks[name] = tasks_class
+
+            @wraps(tasks_class)
+            def wrapped_function(*args, **kwargs):
+                return tasks_class(*args, **kwargs)
+
+            return wrapped_function
+
+        return decorator
+# Copyright (c) 2022-2024, The ORBIT Project Developers.
+# All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+"""Sub-module containing utilities for working with different array backends."""
+
+from typing import Union
+
+import numpy as np
+import torch
+import warp as wp
+
+TensorData = Union[np.ndarray, torch.Tensor, wp.array]
+"""Type definition for a tensor data.
+
+Union of numpy, torch, and warp arrays.
+"""
+
+TENSOR_TYPES = {
+    'numpy': np.ndarray,
+    'torch': torch.Tensor,
+    'warp': wp.array,
+}
+"""A dictionary containing the types for each backend.
+
+The keys are the name of the backend ("numpy", "torch", "warp") and the values are the corresponding type
+(``np.ndarray``, ``torch.Tensor``, ``wp.array``).
+"""
+
+TENSOR_TYPE_CONVERSIONS = {
+    'numpy': {
+        wp.array: lambda x: x.numpy(),
+        torch.Tensor: lambda x: x.detach().cpu().numpy()
+    },
+    'torch': {
+        wp.array: lambda x: wp.torch.to_torch(x),
+        np.ndarray: lambda x: torch.from_numpy(x)
+    },
+    'warp': {
+        np.ndarray: lambda x: wp.array(x),
+        torch.Tensor: lambda x: wp.torch.from_torch(x)
+    },
+}
+"""A nested dictionary containing the conversion functions for each backend.
+
+The keys of the outer dictionary are the name of target backend ("numpy", "torch", "warp"). The keys of the
+inner dictionary are the source backend (``np.ndarray``, ``torch.Tensor``, ``wp.array``).
+"""
+
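+# Illustrative sketch (not part of the original module): dispatching a conversion through the
+# lookup tables above.
+#
+#     x = np.zeros(3, dtype=np.float32)
+#     to_torch = TENSOR_TYPE_CONVERSIONS['torch'][type(x)]  # np.ndarray -> torch.Tensor
+#     t = to_torch(x)
+#     assert isinstance(t, TENSOR_TYPES['torch'])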
+
+
+def convert_to_torch(
+    array: TensorData,
+    dtype: torch.dtype = None,
+    device: torch.device | str | None = None,
+) -> torch.Tensor:
+    """Converts a given array into a torch tensor.
+
+    The function tries to convert the array to a torch tensor. If the array is a numpy/warp array, or a python
+    list/tuple, it is converted to a torch tensor. If the array is already a torch tensor, it is returned
+    directly.
+
+    If ``device`` is None, then the function deduces the current device of the data. For numpy arrays,
+    this defaults to "cpu", for torch tensors it is "cpu" or "cuda", and for warp arrays it is "cuda".
+
+    Note:
+        Since PyTorch does not support unsigned integer types, unsigned integer arrays are converted to
+        signed integer arrays. This is done by casting the array to the corresponding signed integer type.
+
+    Args:
+        array: The input array. It can be a numpy array, warp array, python list/tuple, or torch tensor.
+        dtype: Target data-type for the tensor.
+        device: The target device for the tensor. Defaults to None.
+
+    Returns:
+        The converted array as torch tensor.
+    """
+    # Convert array to tensor
+    # if the datatype is not currently supported by torch we need to improvise
+    # supported types are: https://pytorch.org/docs/stable/tensors.html
+    if isinstance(array, torch.Tensor):
+        tensor = array
+    elif isinstance(array, np.ndarray):
+        if array.dtype == np.uint32:
+            array = array.astype(np.int32)
+        # need to deal with object arrays (np.void) separately
+        tensor = torch.from_numpy(array)
+    elif isinstance(array, wp.array):
+        if array.dtype == wp.uint32:
+            array = array.view(wp.int32)
+        tensor = wp.to_torch(array)
+    else:
+        tensor = torch.Tensor(array)
+    # Convert tensor to the right device
+    if device is not None and str(tensor.device) != str(device):
+        tensor = tensor.to(device)
+    # Convert dtype of tensor if requested
+    if dtype is not None and tensor.dtype != dtype:
+        tensor = tensor.type(dtype)
+
+    return tensor
+# Copyright (c) 2022-2024, The ORBIT Project Developers.
+# All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+"""Sub-module that defines the host-server where assets and resources are stored.
+
+By default, we use the Isaac Sim Nucleus Server for hosting assets and resources. This makes
+distribution of the assets easier and makes the repository smaller in size code-wise.
+
+For more information, please check information on `Omniverse Nucleus`_.
+
+.. _Omniverse Nucleus: https://docs.omniverse.nvidia.com/nucleus/latest/overview/overview.html
+"""
+
+import io
+import os
+import tempfile
+from typing import Literal
+
+import carb
+import omni.client
+import omni.isaac.core.utils.nucleus as nucleus_utils
+
+# get assets root path
+# note: we check only once at the start of the module to prevent multiple checks on the Nucleus Server
+NUCLEUS_ASSET_ROOT_DIR = nucleus_utils.get_assets_root_path()
+"""Path to the root directory on the Nucleus Server.
+
+This is resolved using Isaac Sim's Nucleus API. If the Nucleus Server is not running, then this
+will be set to None. The path is resolved using the following steps:
+
+1. Based on simulation parameter: ``/persistent/isaac/asset_root/default``.
+2. Iterating over all the connected Nucleus Servers and checking for the first server that has the
+   connected status.
+3. Based on simulation parameter: ``/persistent/isaac/asset_root/cloud``.
+"""
+
+# check nucleus connection
+if NUCLEUS_ASSET_ROOT_DIR is None:
+    msg = (
+        'Unable to perform Nucleus login on Omniverse. Assets root path is not set.\n'
+        '\tPlease check: https://docs.omniverse.nvidia.com/app_isaacsim/app_isaacsim/overview.html#omniverse-nucleus')
+    carb.log_error(msg)
+    raise RuntimeError(msg)
+
+NVIDIA_NUCLEUS_DIR = f'{NUCLEUS_ASSET_ROOT_DIR}/NVIDIA'
+"""Path to the root directory on the NVIDIA Nucleus Server."""
+
+ISAAC_NUCLEUS_DIR = f'{NUCLEUS_ASSET_ROOT_DIR}/Isaac'
+"""Path to the ``Isaac`` directory on the NVIDIA Nucleus Server."""
+
+ISAAC_ORBIT_NUCLEUS_DIR = f'{ISAAC_NUCLEUS_DIR}/Samples/Orbit'
+"""Path to the ``Isaac/Samples/Orbit`` directory on the NVIDIA Nucleus Server."""
+
+
+
+def check_file_path(path: str) -> Literal[0, 1, 2]:
+    """Checks if a file exists on the Nucleus Server or locally.
+
+    Args:
+        path: The path to the file.
+
+    Returns:
+        The status of the file. Possible values are listed below.
+
+        * :obj:`0` if the file does not exist
+        * :obj:`1` if the file exists locally
+        * :obj:`2` if the file exists on the Nucleus Server
+    """
+    if os.path.isfile(path):
+        return 1
+    elif omni.client.stat(path)[0] == omni.client.Result.OK:
+        return 2
+    else:
+        return 0
+
+
+
+def retrieve_file_path(path: str, download_dir: str | None = None, force_download: bool = True) -> str:
+    """Retrieves the path to a file on the Nucleus Server or locally.
+
+    If the file exists locally, then the absolute path to the file is returned.
+    If the file exists on the Nucleus Server, then the file is downloaded to the local machine
+    and the absolute path to the file is returned.
+
+    Args:
+        path: The path to the file.
+        download_dir: The directory where the file should be downloaded. Defaults to None, in which
+            case the file is downloaded to the system's temporary directory.
+        force_download: Whether to force download the file from the Nucleus Server. This will overwrite
+            the local file if it exists. Defaults to True.
+
+    Returns:
+        The path to the file on the local machine.
+
+    Raises:
+        FileNotFoundError: When the file is not found locally or on Nucleus Server.
+        RuntimeError: When the file cannot be copied from the Nucleus Server to the local machine. This
+            can happen when the file already exists locally and :attr:`force_download` is set to False.
+    """
+    # check file status
+    file_status = check_file_path(path)
+    if file_status == 1:
+        return os.path.abspath(path)
+    elif file_status == 2:
+        # resolve download directory
+        if download_dir is None:
+            download_dir = tempfile.gettempdir()
+        else:
+            download_dir = os.path.abspath(download_dir)
+        # create download directory if it does not exist
+        if not os.path.exists(download_dir):
+            os.makedirs(download_dir)
+        # download file in temp directory using os
+        file_name = os.path.basename(omni.client.break_url(path).path)
+        target_path = os.path.join(download_dir, file_name)
+        # check if file already exists locally
+        if not os.path.isfile(target_path) or force_download:
+            # copy file to local machine
+            result = omni.client.copy(path, target_path)
+            if result != omni.client.Result.OK and force_download:
+                raise RuntimeError(f"Unable to copy file: '{path}'. Is the Nucleus Server running?")
+        return os.path.abspath(target_path)
+    else:
+        raise FileNotFoundError(f'Unable to find the file: {path}')
+
+
+
+def read_file(path: str) -> io.BytesIO:
+    """Reads a file from the Nucleus Server or locally.
+
+    Args:
+        path: The path to the file.
+
+    Raises:
+        FileNotFoundError: When the file is not found locally or on Nucleus Server.
+
+    Returns:
+        The content of the file.
+    """
+    # check file status
+    file_status = check_file_path(path)
+    if file_status == 1:
+        with open(path, 'rb') as f:
+            return io.BytesIO(f.read())
+    elif file_status == 2:
+        file_content = omni.client.read_file(path)[2]
+        return io.BytesIO(memoryview(file_content).tobytes())
+    else:
+        raise FileNotFoundError(f'Unable to find the file: {path}')
+# Copyright (c) 2022-2024, The ORBIT Project Developers.
+# All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+"""Sub-module that provides a wrapper around the Python 3.7 onwards ``dataclasses`` module."""
+
+import inspect
+from collections.abc import Callable
+from copy import deepcopy
+from dataclasses import MISSING, Field, dataclass, field, replace
+from typing import Any, ClassVar
+
+from .dict import class_to_dict, update_class_from_dict
+
+_CONFIGCLASS_METHODS = ['to_dict', 'from_dict', 'replace', 'copy']
+"""List of class methods added at runtime to dataclass."""
+
+"""
+Wrapper around dataclass.
+"""
+
+
+def __dataclass_transform__():
+    """Add annotations decorator for PyLance."""
+    return lambda a: a
+
+
+
+@__dataclass_transform__()
+def configclass(cls, **kwargs):
+    """Wrapper around `dataclass` functionality to add extra checks and utilities.
+
+    As of Python 3.7, the standard dataclasses have two main issues which make them non-generic for
+    configuration use-cases. These include:
+
+    1. Requiring a type annotation for all its members.
+    2. Requiring explicit usage of :meth:`field(default_factory=...)` to reinitialize mutable variables.
+
+    This function provides a decorator that wraps around Python's `dataclass`_ utility to deal with
+    the above two issues. It also provides additional helper functions for dictionary <-> class
+    conversion and easily copying class instances.
+
+    Usage:
+
+    .. code-block:: python
+
+        from dataclasses import MISSING
+
+        from omni.isaac.orbit.utils.configclass import configclass
+
+
+        @configclass
+        class ViewerCfg:
+            eye: list = [7.5, 7.5, 7.5]  # field missing on purpose
+            lookat: list = field(default_factory=[0.0, 0.0, 0.0])
+
+
+        @configclass
+        class EnvCfg:
+            num_envs: int = MISSING
+            episode_length: int = 2000
+            viewer: ViewerCfg = ViewerCfg()
+
+        # create configuration instance
+        env_cfg = EnvCfg(num_envs=24)
+
+        # print information as a dictionary
+        print(env_cfg.to_dict())
+
+        # create a copy of the configuration
+        env_cfg_copy = env_cfg.copy()
+
+        # replace arbitrary fields using keyword arguments
+        env_cfg_copy = env_cfg_copy.replace(num_envs=32)
+
+    Args:
+        cls: The class to wrap around.
+        **kwargs: Additional arguments to pass to :func:`dataclass`.
+
+    Returns:
+        The wrapped class.
+
+    .. _dataclass: https://docs.python.org/3/library/dataclasses.html
+    """
+    # add type annotations
+    _add_annotation_types(cls)
+    # add field factory
+    _process_mutable_types(cls)
+    # copy mutable members
+    # note: we check if user defined __post_init__ function exists and augment it with our own
+    if hasattr(cls, '__post_init__'):
+        setattr(cls, '__post_init__', _combined_function(cls.__post_init__, _custom_post_init))
+    else:
+        setattr(cls, '__post_init__', _custom_post_init)
+    # add helper functions for dictionary conversion
+    setattr(cls, 'to_dict', _class_to_dict)
+    setattr(cls, 'from_dict', _update_class_from_dict)
+    setattr(cls, 'replace', _replace_class_with_kwargs)
+    setattr(cls, 'copy', _copy_class)
+    # wrap around dataclass
+    cls = dataclass(cls, **kwargs)
+    # return wrapped class
+    return cls
+
+
+"""
+Dictionary <-> Class operations.
+
+These are redefined here to add new docstrings.
+"""
+
+
+def _class_to_dict(obj: object) -> dict[str, Any]:
+    """Convert an object into dictionary recursively.
+
+    Returns:
+        Converted dictionary mapping.
+    """
+    return class_to_dict(obj)
+
+
+def _update_class_from_dict(obj, data: dict[str, Any]) -> None:
+    """Reads a dictionary and sets object variables recursively.
+
+    This function performs in-place update of the class member attributes.
+
+    Args:
+        data: Input (nested) dictionary to update from.
+
+    Raises:
+        TypeError: When input is not a dictionary.
+        ValueError: When dictionary has a value that does not match default config type.
+        KeyError: When dictionary has a key that does not exist in the default config type.
+    """
+    return update_class_from_dict(obj, data, _ns='')
+
+
+def _replace_class_with_kwargs(obj: object, **kwargs) -> object:
+    """Return a new object replacing specified fields with new values.
+
+    This is especially useful for frozen classes. Example usage:
+
+    .. code-block:: python
+
+        @configclass(frozen=True)
+        class C:
+            x: int
+            y: int
+
+        c = C(1, 2)
+        c1 = c.replace(x=3)
+        assert c1.x == 3 and c1.y == 2
+
+    Args:
+        obj: The object to replace.
+        **kwargs: The fields to replace and their new values.
+
+    Returns:
+        The new object.
+    """
+    return replace(obj, **kwargs)
+
+
+def _copy_class(obj: object) -> object:
+    """Return a new object with the same fields as the original."""
+    return replace(obj)
+
+
+"""
+Private helper functions.
+"""
+
+
+def _add_annotation_types(cls):
+    """Add annotations to all elements in the dataclass.
+
+    By definition in Python, a field is defined as a class variable that has a type annotation.
+
+    In case type annotations are not provided, dataclass ignores those members when :func:`__dict__()` is called.
+    This function adds these annotations to the class variable to prevent any issues in case the user forgets to
+    specify the type annotation.
+
+    This makes the following a feasible operation:
+
+    @dataclass
+    class State:
+        pos = (0.0, 0.0, 0.0)
+            ^^
+    If the function is NOT used, the following type-error is returned:
+    TypeError: 'pos' is a field but has no type annotation
+    """
+    # get type hints
+    hints = {}
+    # iterate over class inheritance
+    # we add annotations from base classes first
+    for base in reversed(cls.__mro__):
+        # check if base is object
+        if base is object:
+            continue
+        # get base class annotations
+        ann = base.__dict__.get('__annotations__', {})
+        # directly add all annotations from base class
+        hints.update(ann)
+        # iterate over base class members
+        # Note: Do not change this to dir(base) since it orders the members alphabetically.
+        #   This is not desirable since the order of the members is important in some cases.
+        for key in base.__dict__:
+            # get class member
+            value = getattr(base, key)
+            # skip members
+            if _skippable_class_member(key, value, hints):
+                continue
+            # add type annotations for members that don't have explicit type annotations
+            # for these, we deduce the type from the default value
+            if not isinstance(value, type):
+                if key not in hints:
+                    # check if var type is not MISSING
+                    # we cannot deduce type from MISSING!
+                    if value is MISSING:
+                        raise TypeError(f"Missing type annotation for '{key}' in class '{cls.__name__}'."
+                                        ' Please add a type annotation or set a default value.')
+                    # add type annotation
+                    hints[key] = type(value)
+            elif key != value.__name__:
+                # note: we don't want to add type annotations for nested configclass. Thus, we check if
+                #   the name of the type matches the name of the variable.
+                # since Python 3.10, type hints are stored as strings
+                hints[key] = f'type[{value.__name__}]'
+
+    # Note: Do not change this line. `cls.__dict__.get("__annotations__", {})` is different from
+    #   `cls.__annotations__` because of inheritance.
+    cls.__annotations__ = cls.__dict__.get('__annotations__', {})
+    cls.__annotations__ = hints
+
+
+def _process_mutable_types(cls):
+    """Initialize all mutable elements through :obj:`dataclasses.Field` to avoid unnecessary complaints.
+
+    By default, dataclass requires usage of :obj:`field(default_factory=...)` to reinitialize mutable objects every time a new
+    class instance is created. If a member has a mutable type and it is created without specifying the `field(default_factory=...)`,
+    then Python throws an error requiring the usage of `default_factory`.
+
+    Additionally, Python only explicitly checks for field specification when the type is a list, set or dict. This misses the
+    use-case where the type is a class itself. Thus, the code silently carries a bug with it which can lead to undesirable effects.
+
+    This function deals with this issue.
+
+    This makes the following a feasible operation:
+
+    @dataclass
+    class State:
+        pos: list = [0.0, 0.0, 0.0]
+            ^^
+    If the function is NOT used, the following value-error is returned:
+    ValueError: mutable default <class 'list'> for field pos is not allowed: use default_factory
+    """
+    # note: Need to set this up in the same order as annotations. Otherwise, it
+    #   complains about missing positional arguments.
+    ann = cls.__dict__.get('__annotations__', {})
+
+    # iterate over all class members and store them in a dictionary
+    class_members = {}
+    for base in reversed(cls.__mro__):
+        # check if base is object
+        if base is object:
+            continue
+        # iterate over base class members
+        for key in base.__dict__:
+            # get class member
+            f = getattr(base, key)
+            # skip members
+            if _skippable_class_member(key, f):
+                continue
+            # store class member if it is not a type or if it is already present in annotations
+            if not isinstance(f, type) or key in ann:
+                class_members[key] = f
+        # iterate over base class data fields
+        # in previous call, things that became a dataclass field were removed from class members
+        # so we need to add them back here as a dataclass field directly
+        for key, f in base.__dict__.get('__dataclass_fields__', {}).items():
+            # store class member
+            if not isinstance(f, type):
+                class_members[key] = f
+
+    # check that all annotations are present in class members
+    # note: mainly for debugging purposes
+    if len(class_members) != len(ann):
+        raise ValueError(
+            f"In class '{cls.__name__}', number of annotations ({len(ann)}) does not match number of class members"
+            f' ({len(class_members)}). Please check that all class members have type annotations and/or a default'
+            " value. If you don't want to specify a default value, please use the literal `dataclasses.MISSING`.")
+    # iterate over annotations and add field factory for mutable types
+    for key in ann:
+        # find matching field in class
+        value = class_members.get(key, MISSING)
+        # check if key belongs to ClassVar
+        # in that case, we cannot use default_factory!
+        origin = getattr(ann[key], '__origin__', None)
+        if origin is ClassVar:
+            continue
+        # check if f is MISSING
+        # note: commented out for now since it causes issues with inheritance
+        #   of dataclasses when parents have some positional and some keyword arguments.
+        # Ref: https://stackoverflow.com/questions/51575931/class-inheritance-in-python-3-7-dataclasses
+        # TODO: check if this is fixed in Python 3.10
+        # if f is MISSING:
+        #     continue
+        if isinstance(value, Field):
+            setattr(cls, key, value)
+        elif not isinstance(value, type):
+            # create field factory for mutable types
+            value = field(default_factory=_return_f(value))
+            setattr(cls, key, value)
+
+
+def _custom_post_init(obj):
+    """Deepcopy all elements to avoid shared memory issues for mutable objects in dataclasses initialization.
+
+    This function is called explicitly instead of as a part of :func:`_process_mutable_types()` to prevent mapping
+    proxy type i.e. a read-only proxy for mapping objects. The error is thrown when using hierarchical data-classes
+    for configuration.
+    """
+    for key in dir(obj):
+        # skip dunder members
+        if key.startswith('__'):
+            continue
+        # get data member
+        value = getattr(obj, key)
+        # duplicate data members
+        if not callable(value):
+            setattr(obj, key, deepcopy(value))
+
+
+def _combined_function(f1: Callable, f2: Callable) -> Callable:
+    """Combine two functions into one.
+
+    Args:
+        f1: The first function.
+        f2: The second function.
+
+    Returns:
+        The combined function.
+    """
+
+    def _combined(*args, **kwargs):
+        # call both functions
+        f1(*args, **kwargs)
+        f2(*args, **kwargs)
+
+    return _combined
+
+
+"""
+Helper functions
+"""
+
+
+def _skippable_class_member(key: str, value: Any, hints: dict | None = None) -> bool:
+    """Check if the class member should be skipped in configclass processing.
+
+    The following members are skipped:
+
+    * Dunder members: ``__name__``, ``__module__``, ``__qualname__``, ``__annotations__``, ``__dict__``.
+    * Manually-added special class functions: From :obj:`_CONFIGCLASS_METHODS`.
+    * Members that are already present in the type annotations.
+    * Functions bound to the class object or class.
+
+    Args:
+        key: The class member name.
+        value: The class member value.
+        hints: The type hints for the class. Defaults to None, in which case the
+            member's existence in the type hints is not checked.
+
+    Returns:
+        True if the class member should be skipped, False otherwise.
+    """
+    # skip dunder members
+    if key.startswith('__'):
+        return True
+    # skip manually-added special class functions
+    if key in _CONFIGCLASS_METHODS:
+        return True
+    # check if key is already present
+    if hints is not None and key in hints:
+        return True
+    # skip functions bound to class
+    if callable(value):
+        signature = inspect.signature(value)
+        if 'self' in signature.parameters or 'cls' in signature.parameters:
+            return True
+    # Otherwise, don't skip
+    return False
+
+
+def _return_f(f: Any) -> Callable[[], Any]:
+    """Returns default factory function for creating mutable/immutable variables.
+
+    This function should be used to create default factory functions for variables.
+
+    Example:
+
+    .. code-block:: python
+
+        value = field(default_factory=_return_f(value))
+        setattr(cls, key, value)
+    """
+
+    def _wrap():
+        if isinstance(f, Field):
+            if f.default_factory is MISSING:
+                return deepcopy(f.default)
+            else:
+                return f.default_factory
+        else:
+            return f
+
+    return _wrap
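+
+# Illustrative sketch (standard library only, not part of the module): why mutable
+# defaults need a factory. Plain dataclasses reject `pos: list = [0.0, 0.0]`;
+# `field(default_factory=...)` recreates the value per instance, which is what
+# `_return_f` automates for configclass members.
+#
+#   from dataclasses import dataclass, field
+#
+#   @dataclass
+#   class State:
+#       pos: list = field(default_factory=lambda: [0.0, 0.0, 0.0])
+#
+#   a, b = State(), State()
+#   a.pos.append(1.0)
+#   assert b.pos == [0.0, 0.0, 0.0]  # each instance got a fresh list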
+
+# Copyright (c) 2022-2024, The ORBIT Project Developers.
+# All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+# yapf: disable
+
+"""Sub-module for utilities for working with dictionaries."""
+
+import collections.abc
+import hashlib
+import json
+from collections.abc import Iterable, Mapping
+from typing import Any
+
+from .array import TENSOR_TYPE_CONVERSIONS, TENSOR_TYPES
+from .string import callable_to_string, string_to_callable
+
+"""
+Dictionary <-> Class operations.
+"""
+
+
+
+def class_to_dict(obj: object) -> dict[str, Any]:
+    """Convert an object into dictionary recursively.
+
+    Note:
+        Ignores all names starting with "__" (i.e. built-in methods).
+
+    Args:
+        obj: An instance of a class to convert.
+
+    Raises:
+        ValueError: When input argument is not an object.
+
+    Returns:
+        Converted dictionary mapping.
+    """
+    # check that input data is class instance
+    if not hasattr(obj, '__class__'):
+        raise ValueError(f'Expected a class instance. Received: {type(obj)}.')
+    # convert object to dictionary
+    if isinstance(obj, dict):
+        obj_dict = obj
+    else:
+        obj_dict = obj.__dict__
+    # convert to dictionary
+    data = dict()
+    for key, value in obj_dict.items():
+        # disregard builtin attributes
+        if key.startswith('__'):
+            continue
+        # check if attribute is callable -- function
+        if callable(value):
+            data[key] = callable_to_string(value)
+        # check if attribute is a dictionary
+        elif hasattr(value, '__dict__') or isinstance(value, dict):
+            data[key] = class_to_dict(value)
+        else:
+            data[key] = value
+    return data
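+
+# Usage sketch (illustrative): converting a simple config object. The class below
+# is hypothetical and standard-library only.
+#
+#   class CameraCfg:
+#       def __init__(self):
+#           self.width = 640
+#           self.height = 480
+#
+#   print(class_to_dict(CameraCfg()))  # -> {'width': 640, 'height': 480}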
+
+
+
+def update_class_from_dict(obj, data: dict[str, Any], _ns: str = '') -> None:
+    """Reads a dictionary and sets object variables recursively.
+
+    This function performs in-place update of the class member attributes.
+
+    Args:
+        obj: An instance of a class to update.
+        data: Input dictionary to update from.
+        _ns: Namespace of the current object. This is useful for nested configuration
+            classes or dictionaries. Defaults to "".
+
+    Raises:
+        TypeError: When input is not a dictionary.
+        ValueError: When dictionary has a value that does not match default config type.
+        KeyError: When dictionary has a key that does not exist in the default config type.
+    """
+    for key, value in data.items():
+        # key_ns is the full namespace of the key
+        key_ns = _ns + '/' + key
+        # check if key is present in the object
+        if hasattr(obj, key):
+            obj_mem = getattr(obj, key)
+            if isinstance(obj_mem, Mapping):
+                # Note: We don't handle two-level nested dictionaries. Just use configclass if this is needed.
+                # iterate over the dictionary to look for callable values
+                for k, v in obj_mem.items():
+                    if callable(v):
+                        value[k] = string_to_callable(value[k])
+                setattr(obj, key, value)
+            elif isinstance(value, Mapping):
+                # recursively call if it is a dictionary
+                update_class_from_dict(obj_mem, value, _ns=key_ns)
+            elif isinstance(value, Iterable) and not isinstance(value, str):
+                # check length of value to be safe
+                # note: check for None first, since calling len() on None would raise a TypeError
+                if obj_mem is not None and len(obj_mem) != len(value):
+                    raise ValueError(
+                        f'[Config]: Incorrect length under namespace: {key_ns}.'
+                        f' Expected: {len(obj_mem)}, Received: {len(value)}.'
+                    )
+                # set value
+                setattr(obj, key, value)
+            elif callable(obj_mem):
+                # update function name
+                value = string_to_callable(value)
+                setattr(obj, key, value)
+            elif isinstance(value, type(obj_mem)):
+                # check that they are type-safe
+                setattr(obj, key, value)
+            else:
+                raise ValueError(
+                    f'[Config]: Incorrect type under namespace: {key_ns}.'
+                    f' Expected: {type(obj_mem)}, Received: {type(value)}.'
+                )
+        else:
+            raise KeyError(f'[Config]: Key not found under namespace: {key_ns}.')
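+
+# Usage sketch (illustrative): applying overrides from a plain dict onto a config
+# instance. `env_cfg` is a hypothetical object with matching attribute names.
+#
+#   update_class_from_dict(env_cfg, {'episode_length': 1000, 'viewer': {'width': 1280}})
+#   assert env_cfg.episode_length == 1000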
+
+
+def dict_to_md5_hash(data: object) -> str:
+    """Convert a dictionary into a hashable key using MD5 hash.
+
+    Args:
+        data: Input dictionary or configuration object to convert.
+
+    Returns:
+        A string object of double length containing only hexadecimal digits.
+    """
+    # convert to dictionary
+    if isinstance(data, dict):
+        encoded_buffer = json.dumps(data, sort_keys=True).encode()
+    else:
+        encoded_buffer = json.dumps(class_to_dict(data), sort_keys=True).encode()
+    # compute hash using MD5
+    data_hash = hashlib.md5()
+    data_hash.update(encoded_buffer)
+    # return the hash key
+    return data_hash.hexdigest()
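+
+# Usage sketch (illustrative): using the MD5 digest as a cache key for a config.
+#
+#   key = dict_to_md5_hash({'num_envs': 24, 'episode_length': 2000})
+#   print(key)  # deterministic 32-character hex string for the same dict contents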
+
+
+"""
+Dictionary operations.
+"""
+
+
+
+def convert_dict_to_backend(
+    data: dict, backend: str = 'numpy', array_types: Iterable[str] = ('numpy', 'torch', 'warp')
+) -> dict:
+    """Convert all arrays or tensors in a dictionary to a given backend.
+
+    This function iterates over the dictionary, converts all arrays or tensors with the given types to
+    the desired backend, and stores them in a new dictionary. It also works with nested dictionaries.
+
+    Currently supported backends are "numpy", "torch", and "warp".
+
+    Note:
+        This function only converts arrays or tensors. Other types of data are left unchanged. Mutable types
+        (e.g. lists) are referenced by the new dictionary, so they are not copied.
+
+    Args:
+        data: An input dict containing array or tensor data as values.
+        backend: The backend ("numpy", "torch", "warp") to which arrays in this dict should be converted.
+            Defaults to "numpy".
+        array_types: A list containing the types of arrays that should be converted to
+            the desired backend. Defaults to ("numpy", "torch", "warp").
+
+    Raises:
+        ValueError: If the specified ``backend`` or ``array_types`` are unknown, i.e. not in the list of supported
+            backends ("numpy", "torch", "warp").
+
+    Returns:
+        The updated dict with the data converted to the desired backend.
+    """
+    # THINK: Should we also support converting to a specific device, e.g. "cuda:0"?
+    # Check the backend is valid.
+    if backend not in TENSOR_TYPE_CONVERSIONS:
+        raise ValueError(f"Unknown backend '{backend}'. Supported backends are 'numpy', 'torch', and 'warp'.")
+    # Define the conversion functions for each backend.
+    tensor_type_conversions = TENSOR_TYPE_CONVERSIONS[backend]
+
+    # Parse the array types and convert them to the corresponding types: "numpy" -> np.ndarray, etc.
+    parsed_types = list()
+    for t in array_types:
+        # Check type is valid.
+        if t not in TENSOR_TYPES:
+            raise ValueError(f"Unknown array type: '{t}'. Supported array types are 'numpy', 'torch', and 'warp'.")
+        # Exclude types that match the backend, since we do not need to convert these.
+        if t == backend:
+            continue
+        # Convert the string types to the corresponding types.
+        parsed_types.append(TENSOR_TYPES[t])
+
+    # Convert the data to the desired backend.
+    output_dict = dict()
+    for key, value in data.items():
+        # Obtain the data type of the current value.
+        data_type = type(value)
+        # -- arrays
+        if data_type in parsed_types:
+            # check if we have a known conversion.
+            if data_type not in tensor_type_conversions:
+                raise ValueError(f'No registered conversion for data type: {data_type} to {backend}!')
+            # convert the data to the desired backend.
+            output_dict[key] = tensor_type_conversions[data_type](value)
+        # -- nested dictionaries
+        elif isinstance(data[key], dict):
+            # pass the requested backend and array types through to nested dicts
+            output_dict[key] = convert_dict_to_backend(value, backend, array_types)
+        # -- everything else
+        else:
+            output_dict[key] = value
+
+    return output_dict
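+
+# Usage sketch (illustrative): moving a nested observation dict to torch tensors.
+# Assumes torch is installed and registered in TENSOR_TYPE_CONVERSIONS.
+#
+#   import numpy as np
+#   obs = {'rgb': np.zeros((3, 64, 64)), 'state': {'pos': np.zeros(3)}}
+#   obs_torch = convert_dict_to_backend(obs, backend='torch')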
+
+
+
+def update_dict(orig_dict: dict, new_dict: collections.abc.Mapping) -> dict:
+    """Updates existing dictionary with values from a new dictionary.
+
+    This function mimics the dict.update() function. However, it works for
+    nested dictionaries as well.
+
+    Reference:
+        https://stackoverflow.com/questions/3232943/update-value-of-a-nested-dictionary-of-varying-depth
+
+    Args:
+        orig_dict: The original dictionary to insert items to.
+        new_dict: The new dictionary to insert items from.
+
+    Returns:
+        The updated dictionary.
+    """
+    for keyname, value in new_dict.items():
+        if isinstance(value, collections.abc.Mapping):
+            orig_dict[keyname] = update_dict(orig_dict.get(keyname, {}), value)
+        else:
+            orig_dict[keyname] = value
+    return orig_dict
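+
+# Usage sketch (illustrative): deep-merging user overrides into defaults.
+#
+#   defaults = {'sim': {'dt': 0.01, 'gravity': -9.81}, 'seed': 0}
+#   update_dict(defaults, {'sim': {'dt': 0.005}})
+#   assert defaults['sim'] == {'dt': 0.005, 'gravity': -9.81}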
+
+
+def compute_path_bbox(prim_path: str) -> typing.Tuple[carb.Double3, carb.Double3]:
+    """
+    Compute Bounding Box using omni.usd.UsdContext.compute_path_world_bounding_box
+    See https://docs.omniverse.nvidia.com/kit/docs/omni.usd/latest/omni.usd/omni.usd.UsdContext.html#\
+    omni.usd.UsdContext.compute_path_world_bounding_box
+
+    Args:
+        prim_path: A prim path to compute the bounding box.
+    Returns:
+        A range (i.e. bounding box) as a minimum point and maximum point.
+    """
+    return omni.usd.get_context().compute_path_world_bounding_box(prim_path)
+
+
+
+def get_pick_position(robot_base_position: np.ndarray, prim_path: str) -> np.ndarray:
+    """Get the pick position for a manipulator robot to pick an object at prim_path.
+    The pick position is simply the nearest top vertex of the object's bounding box.
+
+    Args:
+        robot_base_position (np.ndarray): robot base position.
+        prim_path (str): prim path of the object to pick.
+
+    Returns:
+        np.ndarray: pick position.
+    """
+    bbox_0, bbox_1 = compute_path_bbox(prim_path)
+
+    x1 = bbox_0[0]
+    x2 = bbox_1[0]
+    y1 = bbox_0[1]
+    y2 = bbox_1[1]
+    top_z = bbox_0[2] if bbox_0[2] > bbox_1[2] else bbox_1[2]
+
+    top_vertices = [
+        np.array([x1, y1, top_z]),
+        np.array([x1, y2, top_z]),
+        np.array([x2, y1, top_z]),
+        np.array([x2, y2, top_z]),
+    ]
+
+    print('================================ Top vertices: ', top_vertices, ' ====================================')
+
+    # choose the top vertex closest to the robot base
+    pick_position = top_vertices[0]
+    for vertex in top_vertices:
+        if np.linalg.norm(robot_base_position - vertex) < np.linalg.norm(robot_base_position - pick_position):
+            pick_position = vertex
+
+    return pick_position
+
+
+
+def get_grabbed_able_xform_paths(root_path: str, prim: Usd.Prim, depth: int = 3) -> typing.List[str]:
+    """Get all prim paths of Xform objects under the specified prim.
+
+    Args:
+        root_path (str): root path of the scene.
+        prim (Usd.Prim): target prim.
+        depth (int, optional): expected depth of Xform objects relative to root_path. Defaults to 3.
+
+    Returns:
+        typing.List[str]: prim paths.
+    """
+    paths = []
+    if prim is None:
+        return paths
+    print(f'get_grabbed_able_xform_paths: start to traverse {prim.GetPrimPath()}')
+    relative_prim_path = str(prim.GetPrimPath())[len(root_path):]
+    if relative_prim_path.count('/') <= depth:
+        for child in prim.GetChildren():
+            if child.GetTypeName() == 'Scope':
+                paths.extend(get_grabbed_able_xform_paths(root_path, child))
+            if child.GetTypeName() == 'Xform':
+                paths.append(str(child.GetPrimPath()))
+
+    return paths
+
+
+
+def get_world_transform_xform(prim: Usd.Prim) -> typing.Tuple[Gf.Vec3d, Gf.Rotation, Gf.Vec3d]:
+    """
+    Get the world transformation of a prim using omni.usd.get_world_transform_matrix().
+    See https://docs.omniverse.nvidia.com/kit/docs/omni.usd/latest/omni.usd/omni.usd.get_world_transform_matrix.html
+    Args:
+        prim: The prim to calculate the world transformation.
+    Returns:
+        A tuple of:
+        - Translation vector.
+        - Rotation quaternion, i.e. 3d vector plus angle.
+        - Scale vector.
+    """
+    world_transform: Gf.Matrix4d = omni.usd.get_world_transform_matrix(prim)
+    translation: Gf.Vec3d = world_transform.ExtractTranslation()
+    rotation: Gf.Rotation = world_transform.ExtractRotation()
+    scale: Gf.Vec3d = Gf.Vec3d(*(v.GetLength() for v in world_transform.ExtractRotationMatrix()))
+    return translation, rotation, scale
+
+
+
+def nearest_xform_from_position(stage: Usd.Stage,
+                                xform_paths: typing.List[str],
+                                position: np.ndarray,
+                                threshold: float = 0) -> str:
+    """Get the prim path of the nearest Xform object to the target position.
+
+    Args:
+        stage (Usd.Stage): usd stage.
+        xform_paths (typing.List[str]): full list of xform paths.
+        position (np.ndarray): target position.
+        threshold (float, optional): max distance. Defaults to 0 (unlimited).
+
+    Returns:
+        str: prim path of the Xform object, None if not found.
+    """
+    start = time.time()
+    if threshold == 0:
+        threshold = 1000000.0
+    min_dist = threshold
+    nearest_prim_path = None
+    for path in xform_paths:
+        prim = stage.GetPrimAtPath(path)
+        if prim is not None and prim.IsValid():
+            pose = get_world_transform_xform(prim)
+            dist = np.linalg.norm(pose[0] - position)
+            if dist < min_dist:
+                min_dist = dist
+                nearest_prim_path = path
+
+    print(f'nearest_xform_from_position costs: {time.time() - start}')
+    return nearest_prim_path
+
+
+"""
+A set of utility functions for general python usage
+"""
+import inspect
+import re
+from abc import ABCMeta
+from collections.abc import Iterable
+from copy import deepcopy
+from functools import wraps
+from importlib import import_module
+
+import numpy as np
+
+# Global dictionary storing all unique names
+NAMES = set()
+CLASS_NAMES = set()
+
+
+class ClassProperty:
+
+    def __init__(self, f_get):
+        self.f_get = f_get
+
+    def __get__(self, owner_self, owner_cls):
+        return self.f_get(owner_cls)
+
+
+
+def subclass_factory(name, base_classes, __init__=None, **kwargs):
+    """
+    Programmatically generates a new class type with name @name, subclassing from base classes @base_classes, with
+    corresponding __init__ call @__init__.
+
+    NOTE: If __init__ is None (default), the __init__ call from @base_classes will be used instead.
+
+    cf. https://stackoverflow.com/questions/15247075/how-can-i-dynamically-create-derived-classes-from-a-base-class
+
+    Args:
+        name (str): Generated class name
+        base_classes (type, or list of type): Base class(es) to use for generating the subclass
+        __init__ (None or function): Init call to use for the base class when it is instantiated. If None is specified,
+            the newly generated class will automatically inherit the __init__ call from @base_classes
+        **kwargs (any): keyword-mapped parameters to override / set in the child class, where the keys represent
+            the class / instance attribute to modify and the values represent the functions / value to set
+    """
+    # Standardize base_classes
+    base_classes = tuple(base_classes if isinstance(base_classes, Iterable) else [base_classes])
+
+    # Generate the new class
+    if __init__ is not None:
+        kwargs['__init__'] = __init__
+    return type(name, base_classes, kwargs)
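+
+# Usage sketch (illustrative): generating a subclass with an attribute override.
+#
+#   class Robot:
+#       speed = 1.0
+#
+#   FastRobot = subclass_factory('FastRobot', Robot, speed=5.0)
+#   assert FastRobot().speed == 5.0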
+
+
+
+def save_init_info(func):
+    """
+    Decorator to save the init info of an object to object._init_info.
+
+    _init_info contains the class name and the class constructor's input args.
+    """
+    sig = inspect.signature(func)
+
+    @wraps(func)  # preserve func name, docstring, arguments list, etc.
+    def wrapper(self, *args, **kwargs):
+        values = sig.bind(self, *args, **kwargs)
+
+        # Prevent args of super init from being saved.
+        if hasattr(self, '_init_info'):
+            func(*values.args, **values.kwargs)
+            return
+
+        # Initialize class's self._init_info.
+        self._init_info = {'class_module': self.__class__.__module__, 'class_name': self.__class__.__name__, 'args': {}}
+
+        # Populate class's self._init_info.
+        for k, p in sig.parameters.items():
+            if k == 'self':
+                continue
+            if k in values.arguments:
+                val = values.arguments[k]
+                if p.kind in (inspect.Parameter.POSITIONAL_OR_KEYWORD, inspect.Parameter.KEYWORD_ONLY):
+                    self._init_info['args'][k] = val
+                elif p.kind == inspect.Parameter.VAR_KEYWORD:
+                    for kwarg_k, kwarg_val in values.arguments[k].items():
+                        self._init_info['args'][kwarg_k] = kwarg_val
+
+        # Call the original function.
+        func(*values.args, **values.kwargs)
+
+    return wrapper
+
+
+
+class RecreatableMeta(type):
+    """
+    Simple metaclass that automatically saves the __init__ args of the instances it creates.
+    """
+
+    def __new__(cls, clsname, bases, clsdict):
+        if '__init__' in clsdict:
+            clsdict['__init__'] = save_init_info(clsdict['__init__'])
+        return super().__new__(cls, clsname, bases, clsdict)
+
+
+class RecreatableAbcMeta(RecreatableMeta, ABCMeta):
+    """
+    A composite metaclass of both RecreatableMeta and ABCMeta.
+
+    Adding in ABCMeta resolves metaclass conflicts.
+    """
+
+    pass
+
+
+
+class Recreatable(metaclass=RecreatableAbcMeta):
+    """
+    Simple class that provides an abstract interface for automatically saving the __init__ args of
+    the classes inheriting it.
+    """
+
+    def get_init_info(self):
+        """
+        Grabs relevant initialization information for this class instance. Useful for directly
+        reloading an object from this information, using @create_object_from_init_info.
+
+        Returns:
+            dict: Nested dictionary that contains this object's initialization information
+        """
+        # Note: self._init_info is procedurally generated via @save_init_info called in metaclass
+        return self._init_info
+
+
+def create_object_from_init_info(init_info):
+    """
+    Create a new object based on the given init info.
+
+    Args:
+        init_info (dict): Nested dictionary that contains an object's init information.
+
+    Returns:
+        any: Newly created object.
+    """
+    module = import_module(init_info['class_module'])
+    cls = getattr(module, init_info['class_name'])
+    return cls(**init_info['args'], **init_info.get('kwargs', {}))
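+
+# Usage sketch (illustrative): recreating an instance from its saved init info.
+# `MyRobot` is a hypothetical Recreatable subclass.
+#
+#   robot = MyRobot(name='r1')
+#   info = robot.get_init_info()
+#   robot_clone = create_object_from_init_info(info)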
+
+
+
+def merge_nested_dicts(base_dict, extra_dict, inplace=False, verbose=False):
+    """
+    Iteratively updates @base_dict with values from @extra_dict. Note: unless @inplace is True, this
+    generates a new dictionary!
+
+    Args:
+        base_dict (dict): Nested base dictionary, which should be updated with all values from @extra_dict
+        extra_dict (dict): Nested extra dictionary, whose values will overwrite corresponding ones in @base_dict
+        inplace (bool): Whether to modify @base_dict in place or not
+        verbose (bool): If True, will print when keys are mismatched
+
+    Returns:
+        dict: Updated dictionary
+    """
+    # Loop through all keys in @extra_dict and update the corresponding values in @base_dict
+    base_dict = base_dict if inplace else deepcopy(base_dict)
+    for k, v in extra_dict.items():
+        if k not in base_dict:
+            base_dict[k] = v
+        else:
+            if isinstance(v, dict) and isinstance(base_dict[k], dict):
+                # propagate @inplace and @verbose into the nested merge
+                base_dict[k] = merge_nested_dicts(base_dict[k], v, inplace=inplace, verbose=verbose)
+            else:
+                not_equal = base_dict[k] != v
+                if isinstance(not_equal, np.ndarray):
+                    not_equal = not_equal.any()
+                if not_equal and verbose:
+                    print(f'Different values for key {k}: {base_dict[k]}, {v}\n')
+                base_dict[k] = np.array(v) if isinstance(v, list) else v
+
+    # Return the updated dict
+    return base_dict
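+
+# Usage sketch (illustrative): overriding one nested key without touching the rest.
+#
+#   base = {'robot': {'arm': 'franka', 'dof': 7}}
+#   merged = merge_nested_dicts(base, {'robot': {'dof': 6}})
+#   assert merged['robot'] == {'arm': 'franka', 'dof': 6}
+#   assert base['robot']['dof'] == 7  # base untouched since inplace=False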
+
+
+
+def get_class_init_kwargs(cls):
+    """
+    Helper function to return a list of all valid keyword arguments (excluding "self") for the given @cls class.
+
+    Args:
+        cls (object): Class from which to grab __init__ kwargs
+
+    Returns:
+        list: All keyword arguments (excluding "self") specified by the @cls __init__ constructor method
+    """
+    return list(inspect.signature(cls.__init__).parameters.keys())[1:]
+
+
+def extract_subset_dict(dic, keys, copy=False):
+    """
+    Helper function to extract a subset of dictionary key-values from a current dictionary. Optionally (deep)copies
+    the values extracted from the original @dic if @copy is True.
+
+    Args:
+        dic (dict): Dictionary containing multiple key-values
+        keys (Iterable): Specific keys to extract from @dic. If a key doesn't exist in @dic, then it is skipped
+        copy (bool): If True, will deepcopy all values corresponding to the specified @keys
+
+    Returns:
+        dict: Extracted subset dictionary containing only the specified @keys and their corresponding values
+    """
+    subset = {k: dic[k] for k in keys if k in dic}
+    return deepcopy(subset) if copy else subset
+
+
+def extract_class_init_kwargs_from_dict(cls, dic, copy=False):
+    """
+    Helper function to return a dictionary of key-values that specifically correspond to the @cls class's __init__
+    constructor method, from @dic which may or may not contain additional, irrelevant kwargs.
+    Note that @dic may possibly be missing certain kwargs as specified by cls.__init__. No error will be raised.
+
+    Args:
+        cls (object): Class from which to grab __init__ kwargs that will be used as filtering keys for @dic
+        dic (dict): Dictionary containing multiple key-values
+        copy (bool): If True, will deepcopy all values corresponding to the specified keys
+
+    Returns:
+        dict: Extracted subset dictionary possibly containing only the specified keys from cls.__init__ and their
+            corresponding values
+    """
+    # extract only relevant kwargs for this specific class
+    return extract_subset_dict(
+        dic=dic,
+        keys=get_class_init_kwargs(cls),
+        copy=copy,
+    )
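+
+# Usage sketch (illustrative): filtering a config dict down to a constructor's kwargs.
+#
+#   class Controller:
+#       def __init__(self, gain=1.0, limit=2.0):
+#           self.gain, self.limit = gain, limit
+#
+#   cfg = {'gain': 0.5, 'limit': 1.0, 'unused_key': 42}
+#   ctrl = Controller(**extract_class_init_kwargs_from_dict(Controller, cfg))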
+
+
+
+def assert_valid_key(key, valid_keys, name=None):
+    """
+    Helper function that asserts that @key is in @valid_keys. If not, it will raise an error.
+
+    Args:
+        key (any): key to check for in @valid_keys
+        valid_keys (Iterable): keys that @key should be checked against
+        name (str or None): if specified, the name associated with the key that will be printed out if the
+            key is not found. If None, defaults to "value"
+    """
+    if name is None:
+        name = 'value'
+    assert key in valid_keys, 'Invalid {} received! Valid options are: {}, got: {}'.format(
+        name,
+        valid_keys.keys() if isinstance(valid_keys, dict) else valid_keys, key)
+
+
+def create_class_from_registry_and_config(cls_name, cls_registry, cfg, cls_type_descriptor):
+    """
+    Helper function to create a class with str type @cls_name, which should be a valid entry in @cls_registry, using
+    kwargs in dictionary form @cfg to pass to the constructor, with @cls_type_descriptor specified for debugging
+
+    Args:
+        cls_name (str): Name of the class to create. This should correspond to the actual class type, in string form
+        cls_registry (dict): Class registry. This should map string names of valid classes to create to the
+            actual class type itself
+        cfg (dict): Any keyword arguments to pass to the class constructor
+        cls_type_descriptor (str): Description of the class type being created. This can be any string and is used
+            solely for debugging purposes
+
+    Returns:
+        any: Created class instance
+    """
+    # Make sure the requested class type is valid
+    assert_valid_key(key=cls_name, valid_keys=cls_registry, name=f'{cls_type_descriptor} type')
+
+    # Grab the kwargs relevant for the specific class
+    cls = cls_registry[cls_name]
+    cls_kwargs = extract_class_init_kwargs_from_dict(cls=cls, dic=cfg, copy=False)
+
+    # Create the class instance
+    return cls(**cls_kwargs)
+
+
+
+def get_uuid(name, n_digits=8):
+    """
+    Helper function to create a unique @n_digits uuid given a unique @name
+
+    Args:
+        name (str): Name of the object or class
+        n_digits (int): Number of digits of the uuid, default is 8
+
+    Returns:
+        int: uuid
+    """
+    # note: Python's built-in hash() is salted per process, so this uuid is only
+    # stable within a single interpreter run
+    return abs(hash(name)) % (10 ** n_digits)
+
+
+def camel_case_to_snake_case(camel_case_text):
+    """
+    Helper function to convert a camel case text to snake case, e.g. "StrawberrySmoothie" -> "strawberry_smoothie"
+
+    Args:
+        camel_case_text (str): Text in camel case
+
+    Returns:
+        str: snake case text
+    """
+    return re.sub(r'(?<!^)(?=[A-Z])', '_', camel_case_text).lower()
+
+
+def snake_case_to_camel_case(snake_case_text):
+    """
+    Helper function to convert a snake case text to camel case, e.g. "strawberry_smoothie" -> "StrawberrySmoothie"
+
+    Args:
+        snake_case_text (str): Text in snake case
+
+    Returns:
+        str: camel case text
+    """
+    return ''.join(item.title() for item in snake_case_text.split('_'))
+
+
+
+def meets_minimum_version(test_version, minimum_version):
+    """
+    Verify that @test_version meets the @minimum_version
+
+    Args:
+        test_version (str): Python package version. Should be, e.g., 0.26.1
+        minimum_version (str): Python package version to test against. Should be, e.g., 0.27.2
+
+    Returns:
+        bool: Whether @test_version meets @minimum_version
+    """
+    test_nums = [int(num) for num in test_version.split('.')]
+    minimum_nums = [int(num) for num in minimum_version.split('.')]
+    assert len(test_nums) == 3
+    assert len(minimum_nums) == 3
+
+    for test_num, minimum_num in zip(test_nums, minimum_nums):
+        if test_num > minimum_num:
+            return True
+        elif test_num < minimum_num:
+            return False
+        # Otherwise, we continue through all sub-versions
+
+    # If we get here, that means test_version == minimum_version, so this is a success
+    return True
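+
+# Usage sketch (illustrative): simple component-wise version check.
+#
+#   assert meets_minimum_version('0.27.2', '0.26.1')
+#   assert not meets_minimum_version('0.25.0', '0.26.1')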
+
+
+
+class UniquelyNamed:
+    """
+    Simple class that implements a name property, which must be implemented by a subclass. Note that any @Named
+    entity must be UNIQUE!
+    """
+
+    def __init__(self):
+        global NAMES
+        # Register this object, making sure its name is unique
+        assert self.name not in NAMES, \
+            f'UniquelyNamed object with name {self.name} already exists!'
+        NAMES.add(self.name)
+
+    # def __del__(self):
+    #     # Remove this object's name from the registry if it's still there
+    #     self.remove_names(include_all_owned=True)
+
+    def remove_names(self, include_all_owned=True, skip_ids=None):
+        """
+        Checks if self.name exists in the global NAMES registry, and deletes it if so. Possibly also iterates through
+        all owned member variables and checks for their corresponding names if @include_all_owned is True.
+
+        Args:
+            include_all_owned (bool): If True, will iterate through all owned members of this instance and remove their
+                names as well, if they are UniquelyNamed
+
+            skip_ids (None or set of int): If specified, will skip over any ids in the specified set that are matched
+                to any attributes found (this compares id(attr) to @skip_ids).
+        """
+        # Make sure skip_ids is a set so we can pass this into the method, and add this instance so we don't
+        # get infinite recursive loops
+        skip_ids = set() if skip_ids is None else skip_ids
+        skip_ids.add(id(self))
+
+        # Check for this name, possibly remove it if it exists
+        if self.name in NAMES:
+            NAMES.remove(self.name)
+
+        # Also possibly iterate through all owned members and check if those are instances of UniquelyNamed
+        if include_all_owned:
+            self._remove_names_recursively_from_dict(dic=self.__dict__, skip_ids=skip_ids)
+
+    def _remove_names_recursively_from_dict(self, dic, skip_ids=None):
+        """
+        Checks if self.name exists in the global NAMES registry, and deletes it if so
+
+        Args:
+            skip_ids (None or set): If specified, will skip over any objects in the specified set that are matched
+                to any attributes found.
+        """
+        # Make sure skip_ids is a set so we can pass this into the method, and add the dictionary so we don't
+        # get infinite recursive loops
+        skip_ids = set() if skip_ids is None else skip_ids
+        skip_ids.add(id(dic))
+
+        # Loop through all values in the input dictionary, and check if any of the values are UniquelyNamed
+        for name, val in dic.items():
+            if id(val) not in skip_ids:
+                # No need to explicitly add val to skip objects because the methods below handle adding it
+                if isinstance(val, UniquelyNamed):
+                    val.remove_names(include_all_owned=True, skip_ids=skip_ids)
+                elif isinstance(val, dict):
+                    # Recursively iterate
+                    self._remove_names_recursively_from_dict(dic=val, skip_ids=skip_ids)
+                elif hasattr(val, '__dict__'):
+                    # Add the attribute and recursively iterate
+                    skip_ids.add(id(val))
+                    self._remove_names_recursively_from_dict(dic=val.__dict__, skip_ids=skip_ids)
+                else:
+                    # Otherwise we just add the value to skip_ids so we don't check it again
+                    skip_ids.add(id(val))
+
+    @property
+    def name(self):
+        """
+        Returns:
+            str: Name of this instance. Must be unique!
+        """
+        raise NotImplementedError
+
+
+
+class UniquelyNamedNonInstance:
+    """
+    Identical to UniquelyNamed, but intended for non-instanceable classes
+    """
+
+    def __init_subclass__(cls, **kwargs):
+        global CLASS_NAMES
+        # Register this class, making sure its name is unique
+        assert cls.name not in CLASS_NAMES, \
+            f'UniquelyNamed class with name {cls.name} already exists!'
+        CLASS_NAMES.add(cls.name)
+
+    @ClassProperty
+    def name(self):
+        """
+        Returns:
+            str: Name of this class. Must be unique!
+        """
+        raise NotImplementedError
+
+
+
+class Registerable:
+    """
+    Simple class template that provides an abstract interface for registering classes.
+    """
+
+    def __init_subclass__(cls, **kwargs):
+        """
+        Registers all subclasses as part of this registry. This is useful to decouple internal codebase from external
+        user additions. This way, users can add their custom subclasses by simply extending this class,
+        and it will automatically be registered internally. This allows users to then specify their classes
+        directly in string-form in e.g., their config files, without having to manually set the str-to-class mapping
+        in our code.
+        """
+        cls._register_cls()
+
+    @classmethod
+    def _register_cls(cls):
+        """
+        Register this class. Can be extended by subclass.
+        """
+        # print(f"registering: {cls.__name__}")
+        # print(f"registry: {cls._cls_registry}", cls.__name__ not in cls._cls_registry)
+        # print(f"do not register: {cls._do_not_register_classes}", cls.__name__ not in cls._do_not_register_classes)
+        # input()
+        if cls.__name__ not in cls._cls_registry and cls.__name__ not in cls._do_not_register_classes:
+            cls._cls_registry[cls.__name__] = cls
+
+    @ClassProperty
+    def _do_not_register_classes(self):
+        """
+        Returns:
+            set of str: Name(s) of classes that should not be registered. Default is empty set.
+                Subclasses that shouldn't be added should call super() and then add their own class name to the set
+        """
+        return set()
+
+    @ClassProperty
+    def _cls_registry(self):
+        """
+        Returns:
+            dict: Mapping from all registered class names to their classes. This should be a REFERENCE
+                to some external, global dictionary that will be filled-in at runtime.
+        """
+        raise NotImplementedError()
+
+
+
+class Serializable:
+    """
+    Simple class that provides an abstract interface to dump / load states, optionally with serialized functionality
+    as well.
+    """
+
+    @property
+    def state_size(self):
+        """
+        Returns:
+            int: Size of this object's serialized state
+        """
+        raise NotImplementedError()
+
+    def _dump_state(self):
+        """
+        Dumps the state of this object in dictionary form (can be empty). Should be implemented by subclass.
+
+        Returns:
+            dict: Keyword-mapped states of this object
+        """
+        raise NotImplementedError()
+
+    def dump_state(self, serialized=False):
+        """
+        Dumps the state of this object in either dictionary or flattened numerical form.
+
+        Args:
+            serialized (bool): If True, will return the state of this object as a 1D numpy array. Otherwise,
+                will return a (potentially nested) dictionary of states for this object
+
+        Returns:
+            dict or n-array: Either:
+                - Keyword-mapped states of this object, or
+                - encoded + serialized, 1D numerical np.array capturing this object's state,
+                  where n is @self.state_size
+        """
+        state = self._dump_state()
+        return self.serialize(state=state) if serialized else state
+
+    def _load_state(self, state):
+        """
+        Load the internal state of this object as specified by @state. Should be implemented by subclass.
+
+        Args:
+            state (dict): Keyword-mapped states of this object to set
+        """
+        raise NotImplementedError()
+
+    def load_state(self, state, serialized=False):
+        """
+        Deserializes and loads this object's state based on @state
+
+        Args:
+            state (dict or n-array): Either:
+                - Keyword-mapped states of this object, or
+                - encoded + serialized, 1D numerical np.array capturing this object's state,
+                  where n is @self.state_size
+            serialized (bool): If True, will interpret @state as a 1D numpy array. Otherwise,
+                will assume the input is a (potentially nested) dictionary of states for this object
+        """
+        state = self.deserialize(state=state) if serialized else state
+        self._load_state(state=state)
+
+    def _serialize(self, state):
+        """
+        Serializes nested dictionary state @state into a flattened 1D numpy array for encoding efficiency.
+        Should be implemented by subclass.
+
+        Args:
+            state (dict): Keyword-mapped states of this object to encode. Should match structure of output from
+                self._dump_state()
+
+        Returns:
+            n-array: encoded + serialized, 1D numerical np.array capturing this object's state
+        """
+        raise NotImplementedError()
+
+    def serialize(self, state):
+        """
+        Serializes nested dictionary state @state into a flattened 1D numpy array for encoding efficiency.
+
+        Args:
+            state (dict): Keyword-mapped states of this object to encode. Should match structure of output from
+                self._dump_state()
+
+        Returns:
+            n-array: encoded + serialized, 1D numerical np.array capturing this object's state
+        """
+        # Simply returns self._serialize() for now. This is for future-proofing.
+        return self._serialize(state=state)
+
+    def _deserialize(self, state):
+        """
+        De-serializes flattened 1D numpy array @state into nested dictionary state.
+        Should be implemented by subclass.
+
+        Args:
+            state (n-array): encoded + serialized, 1D numerical np.array capturing this object's state
+
+        Returns:
+            2-tuple:
+                - dict: Keyword-mapped states of this object. Should match structure of output from
+                  self._dump_state()
+                - int: current index of the flattened state vector that is left off. This is helpful for subclasses
+                  that inherit partial deserializations from parent classes, and need to know where the
+                  deserialization left off before continuing.
+        """
+        raise NotImplementedError
+
+    def deserialize(self, state):
+        """
+        De-serializes flattened 1D numpy array @state into nested dictionary state.
+
+        Args:
+            state (n-array): encoded + serialized, 1D numerical np.array capturing this object's state
+
+        Returns:
+            dict: Keyword-mapped states of this object. Should match structure of output from
+                self._dump_state()
+        """
+        # Sanity check the idx with the expected state size
+        state_dict, idx = self._deserialize(state=state)
+        assert idx == self.state_size, f'Invalid state deserialization occurred! Expected {self.state_size} total ' \
+                                       f'values to be deserialized, only {idx} were.'
+
+        return state_dict
+
+
+
+class SerializableNonInstance:
+    """
+    Identical to Serializable, but intended for non-instance classes
+    """
+
+    @ClassProperty
+    def state_size(self):
+        """
+        Returns:
+            int: Size of this class's serialized state
+        """
+        raise NotImplementedError()
+
+    @classmethod
+    def _dump_state(cls):
+        """
+        Dumps the state of this class in dictionary form (can be empty). Should be implemented by subclass.
+
+        Returns:
+            dict: Keyword-mapped states of this class
+        """
+        raise NotImplementedError()
+
+    @classmethod
+    def dump_state(cls, serialized=False):
+        """
+        Dumps the state of this class in either dictionary or flattened numerical form.
+
+        Args:
+            serialized (bool): If True, will return the state of this class as a 1D numpy array. Otherwise,
+                will return a (potentially nested) dictionary of states for this class
+
+        Returns:
+            dict or n-array: Either:
+                - Keyword-mapped states of this class, or
+                - encoded + serialized, 1D numerical np.array capturing this class's state, where n is @cls.state_size
+        """
+        state = cls._dump_state()
+        return cls.serialize(state=state) if serialized else state
+
+    @classmethod
+    def _load_state(cls, state):
+        """
+        Load the internal state of this class as specified by @state. Should be implemented by subclass.
+
+        Args:
+            state (dict): Keyword-mapped states of this class to set
+        """
+        raise NotImplementedError()
+
+    @classmethod
+    def load_state(cls, state, serialized=False):
+        """
+        Deserializes and loads this class's state based on @state
+
+        Args:
+            state (dict or n-array): Either:
+                - Keyword-mapped states of this class, or
+                - encoded + serialized, 1D numerical np.array capturing this class's state,
+                  where n is @cls.state_size
+            serialized (bool): If True, will interpret @state as a 1D numpy array. Otherwise, will assume the input is
+                a (potentially nested) dictionary of states for this class
+        """
+        state = cls.deserialize(state=state) if serialized else state
+        cls._load_state(state=state)
+
+    @classmethod
+    def _serialize(cls, state):
+        """
+        Serializes nested dictionary state @state into a flattened 1D numpy array for encoding efficiency.
+        Should be implemented by subclass.
+
+        Args:
+            state (dict): Keyword-mapped states of this class to encode. Should match structure of output from
+                cls._dump_state()
+
+        Returns:
+            n-array: encoded + serialized, 1D numerical np.array capturing this class's state
+        """
+        raise NotImplementedError()
+
+    @classmethod
+    def serialize(cls, state):
+        """
+        Serializes nested dictionary state @state into a flattened 1D numpy array for encoding efficiency.
+
+        Args:
+            state (dict): Keyword-mapped states of this class to encode. Should match structure of output from
+                cls._dump_state()
+
+        Returns:
+            n-array: encoded + serialized, 1D numerical np.array capturing this class's state
+        """
+        # Simply returns cls._serialize() for now. This is for future-proofing.
+        return cls._serialize(state=state)
+
+    @classmethod
+    def _deserialize(cls, state):
+        """
+        De-serializes flattened 1D numpy array @state into nested dictionary state.
+        Should be implemented by subclass.
+
+        Args:
+            state (n-array): encoded + serialized, 1D numerical np.array capturing this class's state
+
+        Returns:
+            2-tuple:
+                - dict: Keyword-mapped states of this class. Should match structure of output from
+                  cls._dump_state()
+                - int: current index of the flattened state vector that is left off. This is helpful for subclasses
+                  that inherit partial deserializations from parent classes, and need to know where the
+                  deserialization left off before continuing.
+        """
+        raise NotImplementedError
+
+    @classmethod
+    def deserialize(cls, state):
+        """
+        De-serializes flattened 1D numpy array @state into nested dictionary state.
+
+        Args:
+            state (n-array): encoded + serialized, 1D numerical np.array capturing this class's state
+
+        Returns:
+            dict: Keyword-mapped states of this class. Should match structure of output from
+                cls._dump_state()
+        """
+        # Sanity check the idx with the expected state size
+        state_dict, idx = cls._deserialize(state=state)
+        assert idx == cls.state_size, f'Invalid state deserialization occurred! Expected {cls.state_size} total ' \
+                                      f'values to be deserialized, only {idx} were.'
+
+        return state_dict
+
+
+
+class Wrapper:
+    """
+    Base class for all wrappers in OmniGibson
+
+    Args:
+        obj (any): Arbitrary python object instance to wrap
+    """
+
+    def __init__(self, obj):
+        # Set the internal attributes -- store wrapped obj
+        self.wrapped_obj = obj
+
+    @classmethod
+    def class_name(cls):
+        return cls.__name__
+
+    def _warn_double_wrap(self):
+        """
+        Utility function that checks if we're accidentally trying to double wrap an object
+        Raises:
+            Exception: [Double wrapping object]
+        """
+        obj = self.wrapped_obj
+        while True:
+            if isinstance(obj, Wrapper):
+                if obj.class_name() == self.class_name():
+                    raise Exception('Attempted to double wrap with Wrapper: {}'.format(self.__class__.__name__))
+                obj = obj.wrapped_obj
+            else:
+                break
+
+    @property
+    def unwrapped(self):
+        """
+        Grabs the unwrapped object
+
+        Returns:
+            any: The unwrapped object instance
+        """
+        return self.wrapped_obj.unwrapped if hasattr(self.wrapped_obj, 'unwrapped') else self.wrapped_obj
+
+    # this method is a fallback option on any methods the original object might support
+    def __getattr__(self, attr):
+        # If we're querying wrapped_obj, raise an error
+        if attr == 'wrapped_obj':
+            raise AttributeError('wrapped_obj attribute not initialized yet!')
+
+        # Sanity check to make sure wrapped obj is not None -- if so, raise error
+        assert self.wrapped_obj is not None, f'Cannot access attribute {attr} since wrapped_obj is None!'
+
+        # using getattr ensures that both __getattribute__ and __getattr__ (fallback) get called
+        # (see https://stackoverflow.com/questions/3278077/difference-between-getattr-vs-getattribute)
+        orig_attr = getattr(self.wrapped_obj, attr)
+        if callable(orig_attr):
+
+            def hooked(*args, **kwargs):
+                result = orig_attr(*args, **kwargs)
+                # prevent wrapped_class from becoming unwrapped
+                if id(result) == id(self.wrapped_obj):
+                    return self
+                return result
+
+            return hooked
+        else:
+            return orig_attr
+
+    def __setattr__(self, key, value):
+        # Call setattr on the wrapped obj if it has the attribute, otherwise, operate on this object
+        if hasattr(self, 'wrapped_obj') and self.wrapped_obj is not None and hasattr(self.wrapped_obj, key):
+            setattr(self.wrapped_obj, key, value)
+        else:
+            super().__setattr__(key, value)
+
+
+def clear():
+    """
+    Clear state tied to singleton classes
+    """
+    NAMES.clear()
+    CLASS_NAMES.clear()
+
+# Copyright (c) 2022-2024, The ORBIT Project Developers.
+# All rights reserved.
+#
+# SPDX-License-Identifier: BSD-3-Clause
+# yapf: disable
+
+"""Submodule containing utilities for transforming strings and regular expressions."""
+
+import ast
+import importlib
+import inspect
+import re
+from collections.abc import Callable, Sequence
+from typing import Any
+
+"""
+String formatting.
+"""
+
+
+
+def to_camel_case(snake_str: str, to: str = 'cC') -> str:
+    """Converts a string from snake case to camel case.
+
+    Args:
+        snake_str: A string in snake case (i.e. with '_')
+        to: Convention to convert string to. Defaults to "cC".
+
+    Raises:
+        ValueError: Invalid input argument `to`, i.e. not "cC" or "CC".
+
+    Returns:
+        A string in camel-case format.
+    """
+    # check input is correct
+    if to not in ['cC', 'CC']:
+        msg = 'to_camel_case(): Choose a valid `to` argument (CC or cC)'
+        raise ValueError(msg)
+    # convert string to lower case and split
+    components = snake_str.lower().split('_')
+    if to == 'cC':
+        # We capitalize the first letter of each component except the first one
+        # with the 'title' method and join them together.
+        return components[0] + ''.join(x.title() for x in components[1:])
+    else:
+        # Capitalize first letter in all the components
+        return ''.join(x.title() for x in components)
+
+
+def to_snake_case(camel_str: str) -> str:
+    """Converts a string from camel case to snake case.
+
+    Args:
+        camel_str: A string in camel case.
+
+    Returns:
+        A string in snake case (i.e. with '_')
+    """
+    camel_str = re.sub('(.)([A-Z][a-z]+)', r'\1_\2', camel_str)
+    return re.sub('([a-z0-9])([A-Z])', r'\1_\2', camel_str).lower()
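+
+# Usage sketch (illustrative): round-tripping between the two conventions.
+#
+#   assert to_camel_case('base_env', to='CC') == 'BaseEnv'
+#   assert to_camel_case('base_env', to='cC') == 'baseEnv'
+#   assert to_snake_case('BaseEnv') == 'base_env'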
+
+
+def is_lambda_expression(name: str) -> bool:
+    """Checks if the input string is a lambda expression.
+
+    Args:
+        name: The input string.
+
+    Returns:
+        Whether the input string is a lambda expression.
+    """
+    try:
+        ast.parse(name)
+        return isinstance(ast.parse(name).body[0], ast.Expr) and isinstance(ast.parse(name).body[0].value, ast.Lambda)
+    except SyntaxError:
+        return False
+
+
+def callable_to_string(value: Callable) -> str:
+    """Converts a callable object to a string.
+
+    Args:
+        value: A callable object.
+
+    Raises:
+        ValueError: When the input argument is not a callable object.
+
+    Returns:
+        A string representation of the callable object.
+    """
+    # check if callable
+    if not callable(value):
+        raise ValueError(f'The input argument is not callable: {value}.')
+    # check if lambda function
+    if value.__name__ == '<lambda>':
+        return f"lambda {inspect.getsourcelines(value)[0][0].strip().split('lambda')[1].strip().split(',')[0]}"
+    else:
+        # get the module and function name
+        module_name = value.__module__
+        function_name = value.__name__
+        # return the string
+        return f'{module_name}:{function_name}'
+
+
+
+def string_to_callable(name: str) -> Callable:
+    """Resolves the module and function names to return the function.
+
+    Args:
+        name: The function name. The format should be 'module:attribute_name' or a
+            lambda expression of format: 'lambda x: x'.
+
+    Raises:
+        ValueError: When the resolved attribute is not a function.
+        ValueError: When the module cannot be found.
+
+    Returns:
+        Callable: The function loaded from the module.
+    """
+    try:
+        if is_lambda_expression(name):
+            callable_object = eval(name)
+        else:
+            mod_name, attr_name = name.split(':')
+            mod = importlib.import_module(mod_name)
+            callable_object = getattr(mod, attr_name)
+        # check if attribute is callable
+        if callable(callable_object):
+            return callable_object
+        else:
+            raise AttributeError(f"The imported object is not callable: '{name}'")
+    except (ValueError, ModuleNotFoundError) as e:
+        msg = (
+            f"Could not resolve the input string '{name}' into callable object."
+            " The format of input should be 'module:attribute_name'.\n"
+            f'Received the error:\n{e}.'
+        )
+        raise ValueError(msg)
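+
+# Usage sketch (illustrative): serializing a function reference and resolving it back.
+#
+#   import json
+#   spec = callable_to_string(json.dumps)  # -> 'json:dumps'
+#   assert string_to_callable(spec) is json.dumps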
+
+
+"""
+Regex operations.
+"""
+
+
+
+def resolve_matching_names(
+    keys: str | Sequence[str], list_of_strings: Sequence[str], preserve_order: bool = False
+) -> tuple[list[int], list[str]]:
+    """Match a list of query regular expressions against a list of strings and return the matched indices and names.
+
+    When a list of query regular expressions is provided, the function checks each target string against each
+    query regular expression and returns the indices of the matched strings and the matched strings.
+
+    If :attr:`preserve_order` is True, the ordering of the matched indices and names follows the order of the
+    provided query regular expressions. This means that the ordering is dictated by the order of the query
+    regular expressions and not the order of the target strings.
+
+    If :attr:`preserve_order` is False, the ordering of the matched indices and names follows the order of the
+    provided list of target strings.
+
+    For example, consider the list of strings ['a', 'b', 'c', 'd', 'e'] and the regular expressions ['a|c', 'b'].
+    If :attr:`preserve_order` is False, then the function will return the indices of the matched strings and the
+    strings as: ([0, 1, 2], ['a', 'b', 'c']). When :attr:`preserve_order` is True, it will return them as:
+    ([0, 2, 1], ['a', 'c', 'b']).
+
+    Note:
+        The function does not sort the indices. It returns the indices in the order they are found.
+
+    Args:
+        keys: A regular expression or a list of regular expressions to match the strings in the list.
+        list_of_strings: A list of strings to match.
+        preserve_order: Whether to preserve the order of the query keys in the returned values. Defaults to False.
+
+    Returns:
+        A tuple of lists containing the matched indices and names.
+
+    Raises:
+        ValueError: When multiple matches are found for a string in the list.
+        ValueError: When not all regular expressions are matched.
+    """
+    # resolve name keys
+    if isinstance(keys, str):
+        keys = [keys]
+    # find matching patterns
+    index_list = []
+    names_list = []
+    key_idx_list = []
+    # book-keeping to check that we always have a one-to-one mapping
+    # i.e. each target string should match only one regular expression
+    target_strings_match_found = [None for _ in range(len(list_of_strings))]
+    keys_match_found = [[] for _ in range(len(keys))]
+    # loop over all target strings
+    for target_index, potential_match_string in enumerate(list_of_strings):
+        for key_index, re_key in enumerate(keys):
+            if re.fullmatch(re_key, potential_match_string):
+                # check if match already found
+                if target_strings_match_found[target_index]:
+                    raise ValueError(
+                        f"Multiple matches for '{potential_match_string}':"
+                        f" '{target_strings_match_found[target_index]}' and '{re_key}'!"
+                    )
+                # add to list
+                target_strings_match_found[target_index] = re_key
+                index_list.append(target_index)
+                names_list.append(potential_match_string)
+                key_idx_list.append(key_index)
+                # add for regex key
+                keys_match_found[key_index].append(potential_match_string)
+    # reorder keys if they should be returned in order of the query keys
+    if preserve_order:
+        reordered_index_list = [None] * len(index_list)
+        global_index = 0
+        for key_index in range(len(keys)):
+            for key_idx_position, key_idx_entry in enumerate(key_idx_list):
+                if key_idx_entry == key_index:
+                    reordered_index_list[key_idx_position] = global_index
+                    global_index += 1
+        # reorder index and names list
+        index_list_reorder = [None] * len(index_list)
+        names_list_reorder = [None] * len(index_list)
+        for idx, reorder_idx in enumerate(reordered_index_list):
+            index_list_reorder[reorder_idx] = index_list[idx]
+            names_list_reorder[reorder_idx] = names_list[idx]
+        # update
+        index_list = index_list_reorder
+        names_list = names_list_reorder
+    # check that all regular expressions are matched
+    if not all(keys_match_found):
+        # make this print nicely aligned for debugging
+        msg = '\n'
+        for key, value in zip(keys, keys_match_found):
+            msg += f'\t{key}: {value}\n'
+        msg += f'Available strings: {list_of_strings}\n'
+        # raise error
+        raise ValueError(
+            f'Not all regular expressions are matched! Please check that the regular expressions are correct: {msg}'
+        )
+    # return
+    return index_list, names_list
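+
+A quick usage sketch (the joint names here are hypothetical, chosen only to illustrate the two orderings):
+
+joint_names = ['arm_joint', 'elbow_joint', 'wrist_joint']
+
+# default: results follow the order of the target strings
+ids, names = resolve_matching_names(['wrist.*', 'arm.*'], joint_names)
+# ids == [0, 2], names == ['arm_joint', 'wrist_joint']
+
+# preserve_order=True: results follow the order of the query expressions
+ids, names = resolve_matching_names(['wrist.*', 'arm.*'], joint_names, preserve_order=True)
+# ids == [2, 0], names == ['wrist_joint', 'arm_joint']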
+
+
+
+def resolve_matching_names_values(
+    data: dict[str, Any], list_of_strings: Sequence[str], preserve_order: bool = False
+) -> tuple[list[int], list[str], list[Any]]:
+    """Match a list of regular expressions in a dictionary against a list of strings and return
+    the matched indices, names, and values.
+
+    If :attr:`preserve_order` is True, the ordering of the matched indices and names follows the order of the
+    provided query regular expressions (the dictionary keys). This means that the ordering is dictated by the
+    order of the query regular expressions and not the order of the target strings.
+
+    If :attr:`preserve_order` is False, the ordering of the matched indices and names follows the order of the
+    provided list of target strings.
+
+    For example, consider the dictionary {"a|d|e": 1, "b|c": 2} and the list of strings ['a', 'b', 'c', 'd', 'e'].
+    If :attr:`preserve_order` is False, then the function will return the indices of the matched strings, the
+    matched strings, and the values as: ([0, 1, 2, 3, 4], ['a', 'b', 'c', 'd', 'e'], [1, 2, 2, 1, 1]). When
+    :attr:`preserve_order` is True, it will return them as: ([0, 3, 4, 1, 2], ['a', 'd', 'e', 'b', 'c'], [1, 1, 1, 2, 2]).
+
+    Args:
+        data: A dictionary of regular expressions and values to match the strings in the list.
+        list_of_strings: A list of strings to match.
+        preserve_order: Whether to preserve the order of the query keys in the returned values. Defaults to False.
+
+    Returns:
+        A tuple of lists containing the matched indices, names, and values.
+
+    Raises:
+        TypeError: When the input argument :attr:`data` is not a dictionary.
+        ValueError: When multiple matches are found for a string in the dictionary.
+        ValueError: When not all regular expressions in the data keys are matched.
+    """
+    # check valid input
+    if not isinstance(data, dict):
+        raise TypeError(f'Input argument `data` should be a dictionary. Received: {data}')
+    # find matching patterns
+    index_list = []
+    names_list = []
+    values_list = []
+    key_idx_list = []
+    # book-keeping to check that we always have a one-to-one mapping
+    # i.e. each target string should match only one regular expression
+    target_strings_match_found = [None for _ in range(len(list_of_strings))]
+    keys_match_found = [[] for _ in range(len(data))]
+    # loop over all target strings
+    for target_index, potential_match_string in enumerate(list_of_strings):
+        for key_index, (re_key, value) in enumerate(data.items()):
+            if re.fullmatch(re_key, potential_match_string):
+                # check if match already found
+                if target_strings_match_found[target_index]:
+                    raise ValueError(
+                        f"Multiple matches for '{potential_match_string}':"
+                        f" '{target_strings_match_found[target_index]}' and '{re_key}'!"
+                    )
+                # add to list
+                target_strings_match_found[target_index] = re_key
+                index_list.append(target_index)
+                names_list.append(potential_match_string)
+                values_list.append(value)
+                key_idx_list.append(key_index)
+                # add for regex key
+                keys_match_found[key_index].append(potential_match_string)
+    # reorder keys if they should be returned in order of the query keys
+    if preserve_order:
+        reordered_index_list = [None] * len(index_list)
+        global_index = 0
+        for key_index in range(len(data)):
+            for key_idx_position, key_idx_entry in enumerate(key_idx_list):
+                if key_idx_entry == key_index:
+                    reordered_index_list[key_idx_position] = global_index
+                    global_index += 1
+        # reorder index, names, and values lists
+        index_list_reorder = [None] * len(index_list)
+        names_list_reorder = [None] * len(index_list)
+        values_list_reorder = [None] * len(index_list)
+        for idx, reorder_idx in enumerate(reordered_index_list):
+            index_list_reorder[reorder_idx] = index_list[idx]
+            names_list_reorder[reorder_idx] = names_list[idx]
+            values_list_reorder[reorder_idx] = values_list[idx]
+        # update
+        index_list = index_list_reorder
+        names_list = names_list_reorder
+        values_list = values_list_reorder
+    # check that all regular expressions are matched
+    if not all(keys_match_found):
+        # make this print nicely aligned for debugging
+        msg = '\n'
+        for key, value in zip(data.keys(), keys_match_found):
+            msg += f'\t{key}: {value}\n'
+        msg += f'Available strings: {list_of_strings}\n'
+        # raise error
+        raise ValueError(
+            f'Not all regular expressions are matched! Please check that the regular expressions are correct: {msg}'
+        )
+    # return
+    return index_list, names_list, values_list
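+
+A quick usage sketch (the gain dictionary and joint names here are hypothetical):
+
+stiffness_cfg = {'arm_.*': 100.0, 'gripper_.*': 20.0}
+joint_names = ['arm_shoulder', 'arm_elbow', 'gripper_left']
+ids, names, values = resolve_matching_names_values(stiffness_cfg, joint_names)
+# ids == [0, 1, 2]
+# names == ['arm_shoulder', 'arm_elbow', 'gripper_left']
+# values == [100.0, 100.0, 20.0]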
+A nested dictionary containing the conversion functions for each backend.
+
+The keys of the outer dictionary are the names of the target backends ("numpy", "torch", "warp"). The keys of the
+inner dictionary are the source array types (np.ndarray, torch.Tensor, wp.array).
+
+The function tries to convert the array to a torch tensor. If the array is a numpy or warp array, or a Python
+list/tuple, it is converted to a torch tensor. If the array is already a torch tensor, it is returned directly.
+
+If device is None, then the function deduces the current device of the data. For numpy arrays,
+this defaults to "cpu", for torch tensors it is "cpu" or "cuda", and for warp arrays it is "cuda".
+
+Note
+
+Since PyTorch does not support unsigned integer types, unsigned integer arrays are converted to
+signed integer arrays. This is done by casting the array to the corresponding signed integer type.
+
+Parameters:
+
+array – The input array. It can be a numpy array, warp array, Python list/tuple, or torch tensor.
+
+dtype – Target data-type for the tensor.
+
+device – The target device for the tensor. Defaults to None.
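+
+A minimal sketch of the conversion logic described above, assuming the usual numpy/torch APIs. The helper name is
+illustrative (not the library's), and warp handling is omitted for brevity:
+
+import numpy as np
+import torch
+
+def to_torch_sketch(array, dtype=None, device=None):
+    # already a torch tensor: reuse it directly
+    if isinstance(array, torch.Tensor):
+        tensor = array
+    elif isinstance(array, np.ndarray):
+        # torch has no unsigned integer types beyond uint8, so cast e.g. uint32 to its signed counterpart
+        if array.dtype == np.uint32:
+            array = array.astype(np.int32)
+        tensor = torch.from_numpy(array)
+    else:
+        # python lists/tuples
+        tensor = torch.tensor(array)
+    # move/cast only when explicitly requested
+    if device is not None:
+        tensor = tensor.to(device)
+    if dtype is not None and tensor.dtype != dtype:
+        tensor = tensor.to(dtype)
+    return tensor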
+Wrapper around dataclass functionality to add extra checks and utilities.
+
+As of Python 3.7, the standard dataclasses have two main issues which make them non-generic for
+configuration use-cases. These include:
+
+Requiring a type annotation for all their members.
+
+Requiring explicit usage of field(default_factory=...) to reinitialize mutable variables.
+
+This function provides a decorator that wraps around Python's dataclass utility to deal with
+the above two issues. It also provides additional helper functions for dictionary <-> class
+conversion and easily copying class instances.
+Usage:
+
+from dataclasses import MISSING, field
+
+from omni.isaac.orbit.utils.configclass import configclass
+
+
+@configclass
+class ViewerCfg:
+    eye: list = [7.5, 7.5, 7.5]  # field missing on purpose
+    lookat: list = field(default_factory=[0.0, 0.0, 0.0])
+
+
+@configclass
+class EnvCfg:
+    num_envs: int = MISSING
+    episode_length: int = 2000
+    viewer: ViewerCfg = ViewerCfg()
+
+
+# create configuration instance
+env_cfg = EnvCfg(num_envs=24)
+
+# print information as a dictionary
+print(env_cfg.to_dict())
+
+# create a copy of the configuration
+env_cfg_copy = env_cfg.copy()
+
+# replace arbitrary fields using keyword arguments
+env_cfg_copy = env_cfg_copy.replace(num_envs=32)
+
+Parameters:
+
+cls – The class to wrap around.
+
+**kwargs – Additional arguments to pass to dataclass().
+Convert all arrays or tensors in a dictionary to a given backend.
+
+This function iterates over the dictionary, converts all arrays or tensors with the given types to
+the desired backend, and stores them in a new dictionary. It also works with nested dictionaries.
+
+Currently supported backends are "numpy", "torch", and "warp".
+
+Note
+
+This function only converts arrays or tensors. Other types of data are left unchanged. Mutable types
+(e.g. lists) are referenced by the new dictionary, so they are not copied.
+
+Parameters:
+
+data – An input dict containing array or tensor data as values.
+
+backend – The backend ("numpy", "torch", "warp") to which arrays in this dict should be converted.
+Defaults to "numpy".
+
+array_types – A list containing the types of arrays that should be converted to
+the desired backend. Defaults to ("numpy", "torch", "warp").
+
+Raises:
+
+ValueError – If the specified backend or array_types are unknown, i.e. not in the list of supported
+backends ("numpy", "torch", "warp").
+
+Returns:
+
+The updated dict with the data converted to the desired backend.
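+
+A short usage sketch based on the parameters listed above (the observation dictionary is illustrative, and the
+function is assumed to be imported from the utils module):
+
+import numpy as np
+import torch
+
+obs = {'policy': {'joint_pos': torch.zeros(3)}, 'rgb': torch.ones(4)}
+obs_np = convert_dict_to_backend(obs, backend='numpy')
+# nested entries are converted as well
+assert isinstance(obs_np['policy']['joint_pos'], np.ndarray)
+assert isinstance(obs_np['rgb'], np.ndarray)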
+Get the pick position for a manipulator robot to pick an object at prim_path.
+The pick position is simply the nearest top vertex of the object's bounding box.
+
+Parameters:
+
+robot_base_position (np.ndarray) – the robot's base position.
+Grabs relevant initialization information for this class instance. Useful for directly
+reloading an object from this information, using @create_object_from_init_info.
+
+Returns:
+
+Nested dictionary that contains this object's initialization information
+Deserializes and loads this object's state based on @state
+
+Parameters:
+
+state (dict or n-array) –
+Either:
+- Keyword-mapped states of this object, or
+- encoded + serialized, 1D numerical np.array capturing this object's state,
+where n is @self.state_size
+
+serialized (bool) – If True, will interpret @state as a 1D numpy array. Otherwise,
+will assume the input is a (potentially nested) dictionary of states for this object
Checks if self.name exists in the global NAMES registry, and deletes it if so. Possibly also iterates through
+all owned member variables and checks for their corresponding names if @include_all_owned is True.
+
+
Parameters:
+
+
include_all_owned (bool) – If True, will iterate through all owned members of this instance and remove their
+names as well, if they are UniquelyNamed
+
skip_ids (None or set of int) – If specified, will skip over any ids in the specified set that are matched
+to any attributes found (this compares id(attr) to @skip_ids).
+Helper function to create an instance of the class named @cls_name, which should be a valid entry in @cls_registry,
+using the keyword arguments in dictionary @cfg to pass to the constructor, with @cls_type_descriptor specified for
+debugging
+
+Parameters:
+
+cls_name (str) – Name of the class to create. This should correspond to the actual class type, in string form
+
+cls_registry (dict) – Class registry. This should map string names of valid classes to create to the
+actual class type itself
+
+cfg (dict) – Any keyword arguments to pass to the class constructor
+
+cls_type_descriptor (str) – Description of the class type being created. This can be any string and is used
+solely for debugging purposes
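+
+A minimal sketch of the registry pattern this helper implements (the function, registry, and class names here are
+hypothetical, not the library's):
+
+class PDController:
+    def __init__(self, kp=1.0, kd=0.1):
+        self.kp, self.kd = kp, kd
+
+CONTROLLER_REGISTRY = {'PDController': PDController}
+
+def create_from_registry(cls_name, cls_registry, cfg, cls_type_descriptor):
+    # resolve the class type from its string name, then construct it with cfg as kwargs
+    if cls_name not in cls_registry:
+        raise ValueError(f'Unknown {cls_type_descriptor} type: {cls_name}')
+    return cls_registry[cls_name](**cfg)
+
+controller = create_from_registry('PDController', CONTROLLER_REGISTRY, {'kp': 5.0}, 'controller')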
+Helper function to return a dictionary of key-values that specifically correspond to @cls class's __init__
+constructor method, from @dic which may or may not contain additional, irrelevant kwargs.
+Note that @dic may possibly be missing certain kwargs as specified by cls.__init__. No error will be raised.
+
+Parameters:
+
+cls (object) – Class from which to grab __init__ kwargs that will be used as filtering keys for @dic
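+
+One way such filtering can be done, sketched with Python's inspect module (the helper name is hypothetical):
+
+import inspect
+
+def extract_init_kwargs(cls, dic):
+    # keep only the keys that cls.__init__ actually accepts, ignoring `self`
+    valid_keys = set(inspect.signature(cls.__init__).parameters) - {'self'}
+    return {k: v for k, v in dic.items() if k in valid_keys}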
Helper function to extract a subset of dictionary key-values from a current dictionary. Optionally (deep)copies
+the values extracted from the original @dic if @copy is True.
base_classes (type, or list of type) – Base class(es) to use for generating the subclass
+
+__init__ (None or function) – Init call to use for the base class when it is instantiated. If None is specified,
+the newly generated class will automatically inherit the __init__ call from @base_classes
+
**kwargs (any) – keyword-mapped parameters to override / set in the child class, where the keys represent
+the class / instance attribute to modify and the values represent the functions / value to set
+Match a list of query regular expressions against a list of strings and return the matched indices and names.
+
+When a list of query regular expressions is provided, the function checks each target string against each
+query regular expression and returns the indices of the matched strings and the matched strings.
+
+If preserve_order is True, the ordering of the matched indices and names follows the order of the provided
+query regular expressions. This means that the ordering is dictated by the order of the query regular
+expressions and not the order of the target strings.
+
+If preserve_order is False, the ordering of the matched indices and names follows the order of the provided
+list of target strings.
+
+For example, consider the list of strings ['a', 'b', 'c', 'd', 'e'] and the regular expressions ['a|c', 'b'].
+If preserve_order is False, then the function will return the indices of the matched strings and the
+strings as: ([0, 1, 2], ['a', 'b', 'c']). When preserve_order is True, it will return them as:
+([0, 2, 1], ['a', 'c', 'b']).
+
+Note
+
+The function does not sort the indices. It returns the indices in the order they are found.
+
+Parameters:
+
+keys – A regular expression or a list of regular expressions to match the strings in the list.
+
+list_of_strings – A list of strings to match.
+
+preserve_order – Whether to preserve the order of the query keys in the returned values. Defaults to False.
+
+Returns:
+
+A tuple of lists containing the matched indices and names.
+
+Raises:
+
+ValueError – When multiple matches are found for a string in the list.
+
+ValueError – When not all regular expressions are matched.
+Match a list of regular expressions in a dictionary against a list of strings and return
+the matched indices, names, and values.
+
+If preserve_order is True, the ordering of the matched indices and names follows the order of the provided
+query regular expressions (the dictionary keys). This means that the ordering is dictated by the order of
+the query regular expressions and not the order of the target strings.
+
+If preserve_order is False, the ordering of the matched indices and names follows the order of the provided
+list of target strings.
+
+For example, consider the dictionary {"a|d|e": 1, "b|c": 2} and the list of strings ['a', 'b', 'c', 'd', 'e'].
+If preserve_order is False, then the function will return the indices of the matched strings, the
+matched strings, and the values as: ([0, 1, 2, 3, 4], ['a', 'b', 'c', 'd', 'e'], [1, 2, 2, 1, 1]). When
+preserve_order is True, it will return them as: ([0, 3, 4, 1, 2], ['a', 'd', 'e', 'b', 'c'], [1, 1, 1, 2, 2]).
+
+Parameters:
+
+data – A dictionary of regular expressions and values to match the strings in the list.
+
+list_of_strings – A list of strings to match.
+
+preserve_order – Whether to preserve the order of the query keys in the returned values. Defaults to False.
+
+Returns:
+
+A tuple of lists containing the matched indices, names, and values.
+
+Raises:
+
+TypeError – When the input argument data is not a dictionary.
+
+ValueError – When multiple matches are found for a string in the dictionary.
+
+ValueError – When not all regular expressions in the data keys are matched.
+Then follow the instructions to generate the robot description.
+
+The resulting description looks like this:
+
+# The robot description defines the generalized coordinates and how to map those
+# to the underlying URDF dofs.
+
+api_version: 1.0
+
+# Defines the generalized coordinates. Each generalized coordinate is assumed
+# to have an entry in the URDF.
+# Lula will only use these joints to control the robot position.
+cspace:
+    - joint2_to_joint1
+    - joint3_to_joint2
+    - joint4_to_joint3
+    - joint5_to_joint4
+    - joint6_to_joint5
+    - joint6output_to_joint6
+
+root_link: g_base
+
+default_q: [
+    0.0, -0.0, -0.0, -0.0, 0.0, -0.0
+]
+
+# Most dimensions of the cspace have a direct corresponding element
+# in the URDF. This list of rules defines how unspecified coordinates
+# should be extracted or how values in the URDF should be overwritten.
+
+cspace_to_urdf_rules:
+
+# Lula uses collision spheres to define the robot geometry in order to avoid
+# collisions with external obstacles. If no spheres are specified, Lula will
+# not be able to avoid obstacles.
+
+collision_spheres:
+  - joint1:
+    - "center": [0.0, 0.0, 0.039]
+      "radius": 0.035
+  - joint2:
+    - "center": [0.0, 0.0, 0.0]
+      "radius": 0.02
+    - "center": [0.0, 0.0, -0.045]
+      "radius": 0.02
+    - "center": [0.0, 0.0, -0.011]
+      "radius": 0.02
+    - "center": [0.0, 0.0, -0.023]
+      "radius": 0.02
+    - "center": [0.0, 0.0, -0.034]
+      "radius": 0.02
+  - joint4:
+    - "center": [0.0, 0.0, 0.0]
+      "radius": 0.02
+    - "center": [-0.094, -0.0, -0.0]
+      "radius": 0.02
+    - "center": [-0.016, -0.0, -0.0]
+      "radius": 0.02
+    - "center": [-0.031, -0.0, -0.0]
+      "radius": 0.02
+    - "center": [-0.047, -0.0, -0.0]
+      "radius": 0.02
+    - "center": [-0.063, -0.0, -0.0]
+      "radius": 0.02
+    - "center": [-0.078, -0.0, -0.0]
+      "radius": 0.02
+  - joint3:
+    - "center": [-0.0, -0.0, 0.064]
+      "radius": 0.02
+    - "center": [-0.107, -0.0, 0.064]
+      "radius": 0.02
+    - "center": [-0.018, -0.0, 0.064]
+      "radius": 0.02
+    - "center": [-0.036, -0.0, 0.064]
+      "radius": 0.02
+    - "center": [-0.053, -0.0, 0.064]
+      "radius": 0.02
+    - "center": [-0.071, -0.0, 0.064]
+      "radius": 0.02
+    - "center": [-0.089, -0.0, 0.064]
+      "radius": 0.02
+  - joint5:
+    - "center": [0.0, 0.0, 0.0]
+      "radius": 0.02
+  - joint6:
+    - "center": [0.0, 0.0, 0.0]
+      "radius": 0.02
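+
+Once saved, the description file can be loaded together with the URDF, for example through Isaac Sim's motion
+generation extension. The paths below are placeholders; check the extension's API reference for the exact usage
+in your Isaac Sim version:
+
+from omni.isaac.motion_generation import LulaKinematicsSolver
+
+# paths are placeholders for your generated description and the robot URDF
+kinematics = LulaKinematicsSolver(
+    robot_description_path='PATH/TO/robot_descriptor.yaml',
+    urdf_path='PATH/TO/robot.urdf',
+)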
+Navigate to the GRUtopia root path and configure the conda environment.
+
+$ cd PATH/TO/GRUTOPIA/ROOT
+
+# Conda environment will be created and configured automatically with a prompt.
+$ ./setup_conda.sh
+
+$ cd .. && conda activate grutopia  # or your conda env name
+
+# decompress the house scene
+$ cd PATH/TO/GRUTOPIA/ROOT
+$ cd assets/scenes/
+$ unzip demo_house.zip
+# start simulation
+$ cd ../../..
+$ python ./GRUtopia/demo/h1_house.py
+
+You can control the h1 robot with keyboard commands:
+
+W: Move Forward
+
+S: Move Backward
+
+A: Move Left
+
+D: Move Right
+
+Q: Turn Left
+
+E: Turn Right
+
+You can change the camera view to a perspective/first-person/third-person camera.
+# decompress the city scene
+$ cd PATH/TO/GRUTOPIA/ROOT
+$ cd assets/scenes/
+$ unzip demo_city.zip
+# start simulation
+$ cd ../../..
+$ python ./GRUtopia/demo/h1_city.py
+
+You can control the h1 robot with keyboard commands:
+
+W: Move Forward
+
+S: Move Backward
+
+A: Move Left
+
+D: Move Right
+
+Q: Turn Left
+
+E: Turn Right
+
+You can change the camera view to a perspective/first-person/third-person camera.
+GPT-4o is used as the NPC by default, so an OpenAI API key is required.
+
+Run inside the container:
+
+# run inside container
+$ sed -i 's/YOUR_OPENAI_API_KEY/{YOUR_OPENAI_API_KEY}/g' GRUtopia/demo/config/h1_npc.yaml  # set openai api key
+$ python GRUtopia/demo/h1_npc.py  # start simulation
+
+Now the simulation is available through WebRTC on the WebUI page.
+
+You can control the h1 robot with keyboard commands:
+
+W: Move Forward
+
+S: Move Backward
+
+A: Move Left
+
+D: Move Right
+
+Q: Turn Left
+
+E: Turn Right
+
+And you can talk to the NPC as an agent in the chatbox. The left side of the screen displays Isaac Sim's window, where you can switch to the robot's camera view. The right side features the chat window, where you can interact with the NPC. Ensure your questions are related to the scene, the robot's view, or its position, as unrelated queries might not yield useful responses. Replies will appear in the chat window within a few seconds. During this time, you can continue moving the robot or ask additional questions, which will be answered sequentially.
+
+Note that the NPC might not always provide accurate answers due to design limitations.
+
+Occasionally, unexpected responses from the LLM or code errors may cause issues. Check the error logs or contact us for support in resolving these problems.
+Current embodied intelligence research urgently needs to overcome the disconnect between high-level perceptual planning and low-level motion control. Constructing a highly realistic simulation environment can not only enhance a robot's perception and behavior-planning capabilities but also promote the development of multi-module collaborative control strategies, steadily advancing toward the goal of general-purpose embodied robots. (Research Needs)
+
+High-level studies are typically conducted on static datasets or simulation platforms, which often cannot provide environments with both visual realism and physical realism at the same time, limiting the transferability of research results to real-world application scenarios. At the same time, the development of large-model technology has opened up new paths for improving the perception and behavior-planning abilities of robots, making the goal of universal robots no longer distant. (Industry Status)
+
+To address these challenges, the OpenRobotLab team of Shanghai AI Lab proposes the GRUtopia Embodied Intelligence Simulation Platform. The platform features:
+
+A large-scale scene dataset covering various application scenarios, capable of providing rich and realistic visual and physical environments for embodied research;
+
+An API library and extensive toolkit containing mainstream robotic control algorithms, enabling plug-and-play functionality with just one line of code to achieve a realistic control process, reproducing all kinds of situations likely encountered during planning;
+
+The toolkit also provides functions such as algorithm migration and strategy training.
+
+Additionally, there is a large-model-driven task generation system for embodied tasks and an NPC interaction system, marking the first time automated embodied task generation and multimodal interactive NPCs have been achieved. This offers unlimited training tasks for developing generalized agents and also serves as a foundation for studying embodied behavior interpretability and human-machine interaction. (Achievement Definition)
+Controllers usually drive the joints of a robot, and they are the entry points of robot actions. To make a robot
+move, grab, or even speak and chat online, we use controllers. A rough sketch of how a controller is addressed at
+runtime is shown below.
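+
+The robot and controller names below are placeholders, not a fixed GRUtopia API; the exact controller names and
+parameter shapes depend on the robot's config:
+
+actions = {
+    'h1': {                               # robot name as declared in the config (placeholder)
+        'move_by_speed': [1.0, 0.0, 0.0], # controller name mapped to its input data (placeholder)
+    },
+}
+obs = env.step(actions=actions)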
+Our system message and in-context example are defined in grutopia.npc.prompt, and the LLM inference process is in grutopia.npc.llm_caller. You can customize them according to your own needs and algorithms.
+When we run demo/h1_locomotion.py, observations from sensors can be obtained from obs (obs = env.step(actions=env_actions)).
+
+Use them in the isaac simulation_app's step loop:
+
+while env.simulation_app.is_running():
+    ...
+    obs = env.step(actions)
+    photo = obs['robot_name_in_config']['camera']['rgba']  # here get `camera` data
+    ...
+env.simulation_app.close()