Python source code examples: rospy.Time()
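
rospy.Time represents an instant in ROS time. The examples that follow show the two patterns that come up most often in practice: rospy.Time.now() to stamp outgoing message headers, and rospy.Time(0) (or the equivalent default-constructed rospy.Time()) to ask tf for the latest available transform. As a quick orientation, here is a minimal sketch of the basic API; the node name "time_demo" is just a placeholder and is not taken from any example below.

import rospy
from std_msgs.msg import Header

rospy.init_node("time_demo")       # ROS time can only be queried after the node is initialized

zero = rospy.Time()                # time zero; tf interprets this as "latest available transform"
now = rospy.Time.now()             # current ROS time (wall clock, or /clock when use_sim_time is set)
later = now + rospy.Duration(1.5)  # Time + Duration yields a new Time

header = Header(stamp=now, frame_id="base_link")
rospy.loginfo("stamp: %.3f s", header.stamp.to_sec())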

Example 1
def send_transform(self, translation, rotation, time, child, parent):
        """
        :param translation: the translation of the transformation as a tuple (x, y, z)
        :param rotation: the rotation of the transformation as a tuple (x, y, z, w)
        :param time: the time of the transformation, as a rospy.Time()
        :param child: child frame in tf, string
        :param parent: parent frame in tf, string

        Broadcast the transformation from tf frame child to parent on ROS topic ``"/tf"``.
        """

        t = TransformStamped()
        t.header.frame_id = parent
        t.header.stamp = time
        t.child_frame_id = child
        t.transform.translation.x = translation[0]
        t.transform.translation.y = translation[1]
        t.transform.translation.z = translation[2]

        t.transform.rotation.x = rotation[0]
        t.transform.rotation.y = rotation[1]
        t.transform.rotation.z = rotation[2]
        t.transform.rotation.w = rotation[3]

        self.send_transform_message(t) 
Example 2
def _publish_diagnostics(self):
        # alias
        diag_status = self._diag_array.status[0]

        # fill diagnostics array
        battery_voltage = self._cozmo.battery_voltage
        diag_status.values[0].value = '{:.2f} V'.format(battery_voltage)
        diag_status.values[1].value = '{:.2f} deg'.format(self._cozmo.head_angle.degrees)
        diag_status.values[2].value = '{:.2f} mm'.format(self._cozmo.lift_height.distance_mm)
        if battery_voltage > 3.5:
            diag_status.level = DiagnosticStatus.OK
            diag_status.message = 'Everything OK!'
        elif battery_voltage > 3.4:
            diag_status.level = DiagnosticStatus.WARN
            diag_status.message = 'Battery low! Go charge soon!'
        else:
            diag_status.level = DiagnosticStatus.ERROR
            diag_status.message = 'Battery very low! Cozmo will power off soon!'

        # update message stamp and publish
        self._diag_array.header.stamp = rospy.Time.now()
        self._diag_pub.publish(self._diag_array) 
Example 3
def _publish_joint_state(self):
        """
        Publish joint states as JointStates.

        """
        # only publish if we have a subscriber
        if self._joint_state_pub.get_num_connections() == 0:
            return

        js = JointState()
        js.header.stamp = rospy.Time.now()
        js.header.frame_id = 'cozmo'
        js.name = ['head', 'lift']
        js.position = [self._cozmo.head_angle.radians,
                       self._cozmo.lift_height.distance_mm * 0.001]
        js.velocity = [0.0, 0.0]
        js.effort = [0.0, 0.0]
        self._joint_state_pub.publish(js) 
Example 4
def _publish_battery(self):
        """
        Publish battery as BatteryState message.

        """
        # only publish if we have a subscriber
        if self._battery_pub.get_num_connections() == 0:
            return

        battery = BatteryState()
        battery.header.stamp = rospy.Time.now()
        battery.voltage = self._cozmo.battery_voltage
        battery.present = True
        if self._cozmo.is_on_charger:  # is_charging always returns False
            battery.power_supply_status = BatteryState.POWER_SUPPLY_STATUS_CHARGING
        else:
            battery.power_supply_status = BatteryState.POWER_SUPPLY_STATUS_NOT_CHARGING
        self._battery_pub.publish(battery) 
Example 5
def _publish_odometry(self):
        """
        Publish current pose as Odometry message.

        """
        # only publish if we have a subscriber
        if self._odom_pub.get_num_connections() == 0:
            return

        now = rospy.Time.now()
        odom = Odometry()
        odom.header.frame_id = self._odom_frame
        odom.header.stamp = now
        odom.child_frame_id = self._footprint_frame
        odom.pose.pose.position.x = self._cozmo.pose.position.x * 0.001
        odom.pose.pose.position.y = self._cozmo.pose.position.y * 0.001
        odom.pose.pose.position.z = self._cozmo.pose.position.z * 0.001
        q = quaternion_from_euler(.0, .0, self._cozmo.pose_angle.radians)
        odom.pose.pose.orientation.x = q[0]
        odom.pose.pose.orientation.y = q[1]
        odom.pose.pose.orientation.z = q[2]
        odom.pose.pose.orientation.w = q[3]
        odom.pose.covariance = np.diag([1e-2, 1e-2, 1e-2, 1e3, 1e3, 1e-1]).ravel()
        odom.twist.twist.linear.x = self._lin_vel
        odom.twist.twist.angular.z = self._ang_vel
        odom.twist.covariance = np.diag([1e-2, 1e3, 1e3, 1e3, 1e3, 1e-2]).ravel()
        self._odom_pub.publish(odom) 
Example 6
def convert_pose(pose, from_frame, to_frame):
    """
    Convert a pose or transform between frames using tf.
        pose            -> A geometry_msgs.msg/Pose that defines the robot's position and orientation in a reference_frame
        from_frame      -> A string that defines the original reference_frame of the robot
        to_frame        -> A string that defines the desired reference_frame of the robot to convert to
    """
    global tfBuffer, listener

    # Create Listener object to receive and buffer transformations
    trans = None

    try:
        trans = tfBuffer.lookup_transform(to_frame, from_frame, rospy.Time(0), rospy.Duration(1.0))
    except (tf2_ros.LookupException, tf2_ros.ConnectivityException, tf2_ros.ExtrapolationException) as e:
        print(e)
        rospy.logerr('FAILED TO GET TRANSFORM FROM %s to %s' % (to_frame, from_frame))
        return None 
Example 7
def goto_joint_positions(self, positions_goal):

        positions_cur = self.get_joint_positions()
        assert len(positions_goal) == len(positions_cur)

        duration = norm((r_[positions_goal] - r_[positions_cur]) / self.vel_limits, ord=inf)

        jt = tm.JointTrajectory()
        jt.joint_names = self.joint_names
        jt.header.stamp = rospy.Time.now()

        jtp = tm.JointTrajectoryPoint()
        jtp.positions = positions_goal
        jtp.velocities = zeros(len(positions_goal))
        jtp.time_from_start = rospy.Duration(duration)

        jt.points = [jtp]
        self.controller_pub.publish(jt)

        rospy.loginfo("%s: starting %.2f sec traj", self.controller_name, duration)
        self.pr2.start_thread(JustWaitThread(duration)) 
Example 8
def follow_timed_joint_trajectory(self, positions, velocities, times):

        jt = tm.JointTrajectory()
        jt.joint_names = self.joint_names
        jt.header.stamp = rospy.Time.now()

        for (position, velocity, time) in zip(positions, velocities, times):
            jtp = tm.JointTrajectoryPoint()
            jtp.positions = position
            jtp.velocities = velocity
            jtp.time_from_start = rospy.Duration(time)
            jt.points.append(jtp)

        self.controller_pub.publish(jt)
        rospy.loginfo("%s: starting %.2f sec traj", self.controller_name, times[-1])
        self.pr2.start_thread(JustWaitThread(times[-1])) 
Example 9
def follow_timed_trajectory(self, times, angs):
        times_up = np.arange(0, times[-1] + 1e-4, .1)
        angs_up = np.interp(times_up, times, angs)

        jt = tm.JointTrajectory()
        jt.header.stamp = rospy.Time.now()
        jt.joint_names = ["%s_gripper_joint" % self.lr]
        for (t, a) in zip(times, angs):
            jtp = tm.JointTrajectoryPoint()
            jtp.time_from_start = rospy.Duration(t)
            jtp.positions = [a]
            jt.points.append(jtp)
        self.diag_pub.publish(jt)

        self.pr2.start_thread(GripperTrajectoryThread(self, times_up, angs_up))
        # self.pr2.start_thread(GripperTrajectoryThread(self, times, angs)) 
Example 10
def __init__(self, limb, joint_names):
        self._joint_names = joint_names
        ns = 'robot/limb/' + limb + '/'
        self._client = actionlib.SimpleActionClient(
            ns + "follow_joint_trajectory",
            FollowJointTrajectoryAction,
        )
        self._goal = FollowJointTrajectoryGoal()
        self._goal_time_tolerance = rospy.Time(0.1)
        self._goal.goal_time_tolerance = self._goal_time_tolerance
        server_up = self._client.wait_for_server(timeout=rospy.Duration(10.0))
        if not server_up:
            rospy.logerr("Timed out waiting for Joint Trajectory"
                         " Action Server to connect. Start the action server"
                         " before running example.")
            rospy.signal_shutdown("Timed out waiting for Action Server")
            sys.exit(1)
        self.clear(limb) 
Example 11
def __init__(self, P=1.0, I=0.0, D=0.0):
        self.P = P
        self.I = I
        self.D = D

        self.maxP = sys.float_info.max
        self.maxI = sys.float_info.max
        self.maxD = sys.float_info.max
        self.maxTotal = sys.float_info.max

        # Useful for I part
        self.error_sum = 0.0

        # Useful for D part
        self.last_time = rospy.Time.now()
        self.last_error = 0.0  # sys.float_info.max
        self.last_output = 0.0
        return 
Example 12
def _LocalToWorld(self,pose):
        """
        Transform a pose from local frame to world frame
        @param pose The 4x4 transformation matrix containing the pose to transform
        @return The 4x4 transformation matrix describing the pose in world frame
        """
        import rospy
        #Get pose w.r.t world frame
        self.listener.waitForTransform(self.world_frame,self.detection_frame,
                                       rospy.Time(),rospy.Duration(10))
        t, r = self.listener.lookupTransform(self.world_frame,self.detection_frame,
                                             rospy.Time(0))
        
        #Get relative transform between frames
        offset_to_world = numpy.matrix(transformations.quaternion_matrix(r))
        offset_to_world[0,3] = t[0]
        offset_to_world[1,3] = t[1]
        offset_to_world[2,3] = t[2]
        
        #Compose with pose to get pose in world frame
        result = numpy.array(numpy.dot(offset_to_world, pose))
        
        return result 
Example 13
def _LocalToWorld(self,pose):
        """
        Transform a pose from local frame to world frame
        @param pose The 4x4 transformation matrix containing the pose to transform
        @return The 4x4 transformation matrix describing the pose in world frame
        """
        #Get pose w.r.t world frame
        self.listener.waitForTransform(self.world_frame,self.detection_frame,
                                       rospy.Time(),rospy.Duration(10))
        t, r = self.listener.lookupTransform(self.world_frame,self.detection_frame,
                                             rospy.Time(0))
        
        #Get relative transform between frames
        offset_to_world = numpy.matrix(transformations.quaternion_matrix(r))
        offset_to_world[0,3] = t[0]
        offset_to_world[1,3] = t[1]
        offset_to_world[2,3] = t[2]
        
        #Compose with pose to get pose in world frame
        result = numpy.array(numpy.dot(offset_to_world, pose))
        
        return result 
Example 14
def get_grip_transform(self, ws_name, robot_pose):
        """
        Retrieves the transform needed to create a grip supposing the object
        is placed on the origin of the given workspace.

        :param ws_name: name of the workspace the object is placed on
        :param robot_pose: pose of the robot's tool_link
        """
        # First apply transform for robot pose
        base_link_to_tool_link = self.transform_from_euler(
            robot_pose.position.x, robot_pose.position.y, robot_pose.position.z,
            robot_pose.rpy.roll, robot_pose.rpy.pitch, robot_pose.rpy.yaw,
            "base_link", "tool_link"
        )
        self.__tf_buffer.set_transform(base_link_to_tool_link,
                                       "default_authority")

        # Manually place object on origin
        self.set_relative_pose_object(ws_name, 0, 0, 0)

        # Lookup the grip
        t = self.__tf_buffer.lookup_transform("object_base", "tool_link",
                                              rospy.Time(0))
        t.child_frame_id = "tool_link_target"
        return t 
Example 15
def convert_pose(pose, from_frame, to_frame):
    """
    Convert a pose or transform between frames using tf.
        pose            -> A geometry_msgs.msg/Pose that defines the robot's position and orientation in a reference_frame
        from_frame      -> A string that defines the original reference_frame of the robot
        to_frame        -> A string that defines the desired reference_frame of the robot to convert to
    """
    global tfBuffer, listener

    if tfBuffer is None or listener is None:
        _init_tf()

    try:
        trans = tfBuffer.lookup_transform(to_frame, from_frame, rospy.Time(0), rospy.Duration(1.0))
    except (tf2_ros.LookupException, tf2_ros.ConnectivityException, tf2_ros.ExtrapolationException) as e:
        print(e)
        rospy.logerr('FAILED TO GET TRANSFORM FROM %s to %s' % (to_frame, from_frame))
        return None 
Example 16
def publish_stamped_transform(stamped_transform, seconds=1):
    """
    Publish a stamped transform for debugging purposes.
        stamped_transform       -> A geometry_msgs/TransformStamped to be published
        seconds                 -> An int32 that defines the duration the transform will be broadcast for
    """
    # Create broadcast node
    br = tf2_ros.TransformBroadcaster()

    # Publish once first.
    stamped_transform.header.stamp = rospy.Time.now()
    br.sendTransform(stamped_transform)

    # Publish transform for set time.
    i = 0
    iterations = seconds/0.05
    while not rospy.is_shutdown() and i < iterations:
        stamped_transform.header.stamp = rospy.Time.now()
        br.sendTransform(stamped_transform)
        rospy.sleep(0.05)
        i += 1 
Example 17
def __init__(self):

        rospy.init_node("mav_control_node")
        rospy.Subscriber("/mavros/local_position/pose", PoseStamped, self.pose_callback)
        rospy.Subscriber("/mavros/rc/in", RCIn, self.rc_callback)

        self.cmd_pos_pub = rospy.Publisher("/mavros/setpoint_position/local", PoseStamped, queue_size=1)
        self.cmd_vel_pub = rospy.Publisher("/mavros/setpoint_velocity/cmd_vel_unstamped", Twist, queue_size=1)
        self.rc_override = rospy.Publisher("/mavros/rc/override", OverrideRCIn, queue_size=1)

        # mode 0 = STABILIZE
        # mode 4 = GUIDED
        # mode 9 = LAND
        self.mode_service = rospy.ServiceProxy('/mavros/set_mode', SetMode)
        self.arm_service = rospy.ServiceProxy('/mavros/cmd/arming', CommandBool)
        self.takeoff_service = rospy.ServiceProxy('/mavros/cmd/takeoff', CommandTOL)

        self.rc = RCIn()
        self.pose = Pose()
        self.timestamp = rospy.Time() 
Example 18
def __init__(self):

        rospy.init_node("mav_control_node")
        rospy.Subscriber("/mavros/local_position/pose", PoseStamped, self.pose_callback)
        rospy.Subscriber("/mavros/rc/in", RCIn, self.rc_callback)

        self.cmd_pos_pub = rospy.Publisher("/mavros/setpoint_position/local", PoseStamped, queue_size=1)
        self.cmd_vel_pub = rospy.Publisher("/mavros/setpoint_velocity/cmd_vel_unstamped", Twist, queue_size=1)
        self.rc_override = rospy.Publisher("/mavros/rc/override", OverrideRCIn, queue_size=1)

        # mode 0 = STABILIZE
        # mode 4 = GUIDED
        # mode 9 = LAND
        self.mode_service = rospy.ServiceProxy('/mavros/set_mode', SetMode)
        self.arm_service = rospy.ServiceProxy('/mavros/cmd/arming', CommandBool)
        self.takeoff_service = rospy.ServiceProxy('/mavros/cmd/takeoff', CommandTOL)

        self.rc = RCIn()
        self.pose = Pose()
        self.timestamp = rospy.Time() 
Example 19
def servo_xy(self):
        print("translating in XY at speed:", self.inc)
        d = self.centroid - self.goal_pos

        self.tf_listener.waitForTransform('/'+self.limb+'_hand_camera', '/base', rospy.Time(), rospy.Duration(4.0))
        (trans, rot) = self.tf_listener.lookupTransform('/'+self.limb+'_hand_camera', '/base', rospy.Time(0))
        R = tf.transformations.quaternion_matrix(rot)[:3, :3]
        d = numpy.concatenate( (d, numpy.zeros(1)) )
        d_rot = numpy.dot(R, d) 
        direction = self.inc*d_rot / numpy.linalg.norm(d_rot)
        if not self.outOfRange():
            direction[2] = self.inc
        else:
            direction[2] = 0
        
        direction *= self.inc/ numpy.linalg.norm(direction)
        #print direction

        self.command_ik(direction) 
Example 20
def solve_goal_point(self, centroid):
        # Find the centroid in the point cloud
        x = int(centroid[0])
        y = int(centroid[1])
        depth = self.get_depth(x, y)
        print(depth)
        # Get pixel points in camera units
        v = self.camera_model.projectPixelTo3dRay((x, y))
        d_cam = numpy.concatenate( (depth*numpy.array(v), numpy.ones(1))).reshape((4, 1))

        # TODO: is this the right frame transform?
        self.tf_listener.waitForTransform('/base', '/camera_depth_optical_frame', rospy.Time(), rospy.Duration(4))
        (trans, rot) = self.tf_listener.lookupTransform('/base', '/camera_depth_optical_frame', rospy.Time())
        
        camera_to_base = tf.transformations.compose_matrix(translate=trans, angles=tf.transformations.euler_from_quaternion(rot))

        d_base = numpy.dot(camera_to_base, d_cam)
        d_base[2] -= params['object_height']/2.0
        return d_base[0:3] 
Example 21
def solve_goal_pose(self, centroid):
        # Project centroid into 3D coordinates
        center = (centroid[0] - self.camera_x/2, centroid[1] - self.camera_y/2) 
        vec = numpy.array( self.camera_model.projectPixelTo3dRay(center) )
        # Scale it by the IR reading
        d_cam = ( self.ir_reading - self.min_ir_depth - self.object_height ) * vec
        d_cam = numpy.concatenate((d_cam, numpy.ones(1)))
        #print "Camera vector:", d_cam

        # Now transform into the world frame
        self.tf_listener.waitForTransform('/base', '/'+self.limb+'_hand_camera', rospy.Time(), rospy.Duration(4))
        (trans, rot) = self.tf_listener.lookupTransform('/base', '/'+self.limb+'_hand_camera', rospy.Time())
        
        camera_to_base = tf.transformations.compose_matrix(translate=trans, angles=tf.transformations.euler_from_quaternion(rot))

        d_base = numpy.dot(camera_to_base, d_cam)
        return d_base[0:3] 
Example 22
def show_marker(marker_array_, pos_, ori_, scale_, color_, lifetime_):
    marker_ = Marker()
    marker_.header.frame_id = "/table_top"
    # marker_.header.stamp = rospy.Time.now()
    marker_.type = marker_.CUBE
    marker_.action = marker_.ADD

    marker_.pose.position.x = pos_[0]
    marker_.pose.position.y = pos_[1]
    marker_.pose.position.z = pos_[2]
    marker_.pose.orientation.x = ori_[1]
    marker_.pose.orientation.y = ori_[2]
    marker_.pose.orientation.z = ori_[3]
    marker_.pose.orientation.w = ori_[0]

    marker_.lifetime = rospy.Duration.from_sec(lifetime_)
    marker_.scale.x = scale_[0]
    marker_.scale.y = scale_[1]
    marker_.scale.z = scale_[2]
    marker_.color.a = 0.5
    red_, green_, blue_ = color_
    marker_.color.r = red_
    marker_.color.g = green_
    marker_.color.b = blue_
    marker_array_.markers.append(marker_) 
Example 23
def _convert_frames(self, pt):
        assert len(pt) == 3
        rospy.loginfo("Point to convert: {}".format(pt))
        ps = PointStamped()
        ps.header.frame_id = KINECT_FRAME
        ps.point.x, ps.point.y, ps.point.z = pt
        base_ps = self._transform_listener.transformPoint(BASE_FRAME, ps)
        rospy.loginfo(
            "transform : {}".format(
                self._transform_listener.lookupTransform(
                    BASE_FRAME, KINECT_FRAME, rospy.Time(0)
                )
            )
        )
        base_pt = np.array([base_ps.point.x, base_ps.point.y, base_ps.point.z])
        rospy.loginfo("Base point to convert: {}".format(base_pt))
        return base_pt 
Example 24
def _js_cb(self,msg):

        if not self.is_recording:
            #print "[JOINTS] Not currently recording!"
            return

        updated = self.TfUpdateWorld()
        if updated and not self.sync_gripper:
            # record joints
            self.times.append(rospy.Time.now())
            self.joint_states.append(msg)
            print(msg.position)
            self.world_states.append(copy.deepcopy(self.world))
        elif updated and (rospy.Time.now() - self.last_gripper_msg).to_sec() < self.gripper_t_threshold:
            # record joints
            self.times.append(rospy.Time.now())
            self.joint_states.append(msg)
            self.gripper_cmds.append(self.gripper_cmd)
            self.world_states.append(copy.deepcopy(self.world))
        else:
            print("[JOINTS] Waiting for TF (updated=%d) and gripper..." % updated)
Example 25
def one_nn_action(move_to_pose, close_gripper, open_gripper, tf_buffer):
    """ Execute one simplified action based on the neural network proposed pose
    """
    home = GetHomePoseKDL()
    # move_to_pose(home)
    open_gripper()
    t = rospy.Time(0)
    rospy.sleep(1)
    goal_pose_name = 'predicted_goal_ee_link'
    pose = tf_buffer.lookup_transform('base_link', goal_pose_name, t)
    pose = pm.fromTf(pose_to_vec_quat_pair(pose))

    move_to_pose(pose)
    close_gripper()
    move_to_pose(home)
    move_to_pose(pose)
    open_gripper()
    move_to_pose(home) 
Example 26
def tick(self):
        try:
            (trans,rot) = self.listener.lookupTransform('/map', '/base_link', rospy.Time(0))
            msg = self.get_link_state(link_name="base_link")
        except (tf.LookupException, tf.ConnectivityException, tf.ExtrapolationException):
            return False

        self.pose = msg.link_state.pose
        self.trans, self.rot = trans, rot
        #quaternion = (rot[0], rot[1], rot[2], rot[3])
        orientation = self.pose.orientation
        quaternion = (orientation.x, orientation.y, orientation.z,
                orientation.w)
        euler = tf.transformations.euler_from_quaternion(quaternion)
        self.roll = euler[0]
        self.pitch = euler[1]
        self.yaw = euler[2]
        return True 
Example 27
def list_to_pose(poselist, frame_id="", stamp=rospy.Time(0)):
    """
    Convert a pose given as a list into a PoseStamped.
    :param poselist: a pose in the form [[x, y, z], [x, y, z, w]]
    :param frame_id: the frame_id of the output pose (optional, empty otherwise)
    :param stamp: the stamp of the output pose (optional, 0 otherwise)
    :return: the converted geometry_msgs/PoseStamped object
    """
    p = PoseStamped()
    p.header.frame_id = frame_id
    p.header.stamp = stamp
    p.pose.position.x = poselist[0][0]
    p.pose.position.y = poselist[0][1]
    p.pose.position.z = poselist[0][2]
    p.pose.orientation.x = poselist[1][0]
    p.pose.orientation.y = poselist[1][1]
    p.pose.orientation.z = poselist[1][2]
    p.pose.orientation.w = poselist[1][3]
    return p 
Example 28
def _get_transforms(self, time=None):
        """
        Samples the transforms at the given time.

        :param time: sampling time (now if None)
        :type time: None|rospy.Time
        :rtype: dict[str, ((float, float, float), (float, float, float, float))]
        """
        if time is None:
            time = rospy.Time.now()

        # here we trick the library (it is actually made for eye_on_hand only). Trust me, I'm an engineer
        if self.eye_on_hand:
            rob = self.tfBuffer.lookup_transform(self.robot_base_frame, self.robot_effector_frame, time,
                                                 rospy.Duration(10))
        else:
            rob = self.tfBuffer.lookup_transform(self.robot_effector_frame, self.robot_base_frame, time,
                                                 rospy.Duration(10))
        opt = self.tfBuffer.lookup_transform(self.tracking_base_frame, self.tracking_marker_frame, time,
                                             rospy.Duration(10))
        return {'robot': rob, 'optical': opt} 
Example 29
def update_state(self, state):
        '''

        :param state:
        :type state: State
        :return:
        '''
        cur_time = rospy.Time.now().to_sec()  # current ROS time in seconds; rospy.Time() alone would be time zero
        if state.vx < 0.1:
            if self.car_state_stoped == 0.0:
                self.car_state_stoped = cur_time
            elif cur_time - self.car_state_stoped > 5.0 and self.state == EcuState.READY_TO_DRIVE:
                self.state = EcuState.EMERGENCY_STATE
                rospy.logwarn("Emergency State, too long standing still")
        else:
            self.car_state_stoped = 0.0 
Example 30
def _publish_objects(self):
        """
        Publish detected object as transforms between odom_frame and object_frame.

        """

        for obj in self._cozmo.world.visible_objects:
            now = rospy.Time.now()
            x = obj.pose.position.x * 0.001
            y = obj.pose.position.y * 0.001
            z = obj.pose.position.z * 0.001
            q = (obj.pose.rotation.q1, obj.pose.rotation.q2, obj.pose.rotation.q3, obj.pose.rotation.q0)
            self._tfb.send_transform(
                (x, y, z), q, now, 'cube_' + str(obj.object_id), self._odom_frame
            )