Python tf.transformations Examples

The following are 23 code examples of the tf.transformations module, collected from open-source projects. The source file and originating project are listed above each example. You may also want to check out the other available functions and classes of the tf module.
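tf.transformations is the transformations.py module that ships with the ROS tf package; it provides helpers for converting between Euler angles, quaternions (stored in (x, y, z, w) order) and 4x4 homogeneous matrices. As a quick orientation before the examples, here is a minimal sketch of the calls that recur below (angles in radians; the variable names are illustrative):

import numpy
import tf.transformations

# Euler angles -> quaternion (x, y, z, w), and back
q = tf.transformations.quaternion_from_euler(0.0, 0.0, numpy.pi / 2)
roll, pitch, yaw = tf.transformations.euler_from_quaternion(q)

# quaternion / translation -> 4x4 homogeneous matrices
R = tf.transformations.quaternion_matrix(q)                  # rotation only
Tr = tf.transformations.translation_matrix([1.0, 2.0, 0.0])  # translation only
T = numpy.dot(Tr, R)                                         # combined transform

# 4x4 matrix -> translation vector and quaternion again
trans = tf.transformations.translation_from_matrix(T)
quat = tf.transformations.quaternion_from_matrix(T)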
Example #1
Source File: carla_waypoint_publisher.py    From ros-bridge with MIT License
def publish_waypoints(self):
        """
        Publish the ROS message containing the waypoints
        """
        msg = Path()
        msg.header.frame_id = "map"
        msg.header.stamp = rospy.Time.now()
        if self.current_route is not None:
            for wp in self.current_route:
                pose = PoseStamped()
                pose.pose.position.x = wp[0].transform.location.x
                pose.pose.position.y = -wp[0].transform.location.y
                pose.pose.position.z = wp[0].transform.location.z

                quaternion = tf.transformations.quaternion_from_euler(
                    0, 0, -math.radians(wp[0].transform.rotation.yaw))
                pose.pose.orientation.x = quaternion[0]
                pose.pose.orientation.y = quaternion[1]
                pose.pose.orientation.z = quaternion[2]
                pose.pose.orientation.w = quaternion[3]
                msg.poses.append(pose)

        self.waypoint_publisher.publish(msg)
        rospy.loginfo("Published {} waypoints.".format(len(msg.poses))) 
Example #2
Source File: mathutils.py    From AI-Robot-Challenge-Lab with MIT License
def get_homo_matrix_from_pose_msg(pose, tag=""):
    basetrans = tf.transformations.translation_matrix((pose.position.x,
                                                       pose.position.y,
                                                       pose.position.z))

    baserot = tf.transformations.quaternion_matrix((pose.orientation.x,
                                                    pose.orientation.y,
                                                    pose.orientation.z,
                                                    pose.orientation.w))

    # rospy.loginfo(tag + " basetrans: " + str(basetrans))
    # rospy.loginfo(tag +" baserot: " + str(baserot))

    combined = numpy.matmul(basetrans, baserot)

    # rospy.loginfo(tag + " combined: " + str(combined))
    trans = tf.transformations.translation_from_matrix(combined)
    quat = tf.transformations.quaternion_from_matrix(combined)

    # rospy.loginfo(tag + " back basetrans: " + str(trans))
    # rospy.loginfo(tag +" back baserot: " + str(quat))

    return combined 
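A minimal usage sketch for the helper above, assuming a geometry_msgs/Pose (note that a default-constructed Pose has an all-zero quaternion, so the identity rotation w=1 must be set explicitly):

from geometry_msgs.msg import Pose

pose = Pose()
pose.position.x, pose.position.y, pose.position.z = 0.5, 0.0, 0.2
pose.orientation.w = 1.0  # identity rotation
homo = get_homo_matrix_from_pose_msg(pose, tag="demo")
# homo is a 4x4 homogeneous matrix; its last column carries the translation (0.5, 0.0, 0.2, 1)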
Example #3
Source File: vslam.py    From pyrobot with MIT License
def base_pose_xyyaw(self):
        """
        Returns the (x, y, yaw) of the base. Note that here we assume
        that the robot moves on a flat floor.

        :returns: (x, y, yaw)
        :rtype: (float, float, float)
        """
        trans, rot, T = self.base_pose
        if T is None:
            return None, None, None
        angle = np.arctan2(rot[1, 0], rot[0, 0])
        # angle, axis, _ = tf.transformations.rotation_from_matrix(T)
        x = trans[0]
        y = trans[1]
        yaw = angle
        return x, y, yaw 
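The yaw above is read straight off the rotation matrix: for a rotation about the z axis, rot[0, 0] = cos(yaw) and rot[1, 0] = sin(yaw), so np.arctan2(rot[1, 0], rot[0, 0]) recovers the angle. A quick sanity check of that identity against tf.transformations (illustrative only):

import numpy as np
import tf.transformations

yaw = 0.7
R = tf.transformations.euler_matrix(0.0, 0.0, yaw)[:3, :3]  # pure z rotation
assert np.isclose(np.arctan2(R[1, 0], R[0, 0]), yaw)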
Example #4
Source File: transformations.py    From ikpy with GNU General Public License v2.0
def multiply_transform_old(t1, t2):
    """
    Combines two transformations together
    The order is translation first, then rotation
    :param t1: [[x, y, z], [x, y, z, w]] or matrix 4x4
    :param t2: [[x, y, z], [x, y, z, w]] or matrix 4x4
    :return: The combination t1-t2 in the form [[x, y, z], [x, y, z, w]] or matrix 4x4
    """
    if _is_indexable(t1) and len(t1) == 2:
        t1m = tf.transformations.translation_matrix(t1[0])
        r1m = tf.transformations.quaternion_matrix(t1[1])
        m1m = dot(t1m, r1m)
        t2m = tf.transformations.translation_matrix(t2[0])
        r2m = tf.transformations.quaternion_matrix(t2[1])
        m2m = dot(t2m, r2m)
        rm = dot(m1m, m2m)
        rt = tf.transformations.translation_from_matrix(rm)
        rr = tf.transformations.quaternion_from_matrix(rm)
        return [list(rt), list(rr)]
    else:
        return dot(t1, t2) 
Example #5
Source File: transformations.py    From ikpy with GNU General Public License v2.0
def quat_rotate(rotation, vector):
    """
    Rotate a vector according to a quaternion. Equivalent to the C++ method tf::quatRotate
    :param rotation: the rotation
    :param vector: the vector to rotate
    :return: the rotated vector
    """

    def quat_mult_point(q, w):
        return (q[3] * w[0] + q[1] * w[2] - q[2] * w[1],
                q[3] * w[1] + q[2] * w[0] - q[0] * w[2],
                q[3] * w[2] + q[0] * w[1] - q[1] * w[0],
                -q[0] * w[0] - q[1] * w[1] - q[2] * w[2])

    q = quat_mult_point(rotation, vector)
    q = tf.transformations.quaternion_multiply(
        q, tf.transformations.quaternion_inverse(rotation))
    return [q[0], q[1], q[2]] 
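quat_rotate computes the quaternion sandwich product q * v * q^-1, the active rotation of a vector, matching tf::quatRotate. A small sanity check: rotating the x axis by 90 degrees about z should give the y axis (illustrative only):

import math
import tf.transformations

q = tf.transformations.quaternion_from_euler(0, 0, math.pi / 2)
rotated = quat_rotate(q, [1.0, 0.0, 0.0])
# rotated is approximately [0.0, 1.0, 0.0]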
Example #6
Source File: vncc.py    From prpy with BSD 3-Clause "New" or "Revised" License
def _LocalToWorld(self,pose):
        """
        Transform a pose from local frame to world frame
        @param pose The 4x4 transformation matrix containing the pose to transform
        @return The 4x4 transformation matrix describing the pose in world frame
        """
        #Get pose w.r.t world frame
        self.listener.waitForTransform(self.world_frame,self.detection_frame,
                                       rospy.Time(),rospy.Duration(10))
        t, r = self.listener.lookupTransform(self.world_frame,self.detection_frame,
                                             rospy.Time(0))
        
        #Get relative transform between frames
        offset_to_world = numpy.matrix(transformations.quaternion_matrix(r))
        offset_to_world[0,3] = t[0]
        offset_to_world[1,3] = t[1]
        offset_to_world[2,3] = t[2]
        
        #Compose with pose to get pose in world frame
        result = numpy.array(numpy.dot(offset_to_world, pose))
        
        return result 
Example #7
Source File: simtrack.py    From prpy with BSD 3-Clause "New" or "Revised" License
def _LocalToWorld(self,pose):
        """
        Transform a pose from local frame to world frame
        @param pose The 4x4 transformation matrix containing the pose to transform
        @return The 4x4 transformation matrix describing the pose in world frame
        """
        import rospy
        #Get pose w.r.t world frame
        self.listener.waitForTransform(self.world_frame,self.detection_frame,
                                       rospy.Time(),rospy.Duration(10))
        t, r = self.listener.lookupTransform(self.world_frame,self.detection_frame,
                                             rospy.Time(0))
        
        #Get relative transform between frames
        offset_to_world = numpy.matrix(transformations.quaternion_matrix(r))
        offset_to_world[0,3] = t[0]
        offset_to_world[1,3] = t[1]
        offset_to_world[2,3] = t[2]
        
        #Compose with pose to get pose in world frame
        result = numpy.array(numpy.dot(offset_to_world, pose))
        
        return result 
Example #8
Source File: simtrack.py    From prpy with BSD 3-Clause "New" or "Revised" License
def _MsgToPose(msg):
        """
        Parse the ROS message to a 4x4 pose format
        @param msg The ros message containing a pose
        @return A 4x4 transformation matrix containing the pose
        as read from the message
        """
        import tf.transformations as transformations
        #Get rotation (the orientation arrives as a quaternion) and translation
        pose = transformations.quaternion_matrix(numpy.array([msg.pose.orientation.x, msg.pose.orientation.y, msg.pose.orientation.z, msg.pose.orientation.w]))

        pose[0,3] = msg.pose.position.x
        pose[1,3] = msg.pose.position.y
        pose[2,3] = msg.pose.position.z

        return pose
Example #9
Source File: task_planner.py    From AI-Robot-Challenge-Lab with MIT License
def compute_grasp_pose_offset(self, pose):
        """
        :param pose:
        :return:
        """

        yrot = tf.transformations.quaternion_from_euler(0, math.pi, 0)

        cubeorientation = [pose.orientation.x, pose.orientation.y, pose.orientation.z, pose.orientation.w]
        # oorient = [overhead_orientation.x,overhead_orientation.y,overhead_orientation.z,overhead_orientation.w]

        # resultingorient = tf.transformations.quaternion_multiply(cubeorientation, tf.transformations.quaternion_conjugate(oorient))
        resultingorient = tf.transformations.quaternion_multiply(cubeorientation, yrot)

        # resultingorient = cubeorientation


        pose.orientation = Quaternion(x=resultingorient[0], y=resultingorient[1], z=resultingorient[2],
                                      w=resultingorient[3])

        pose.position.x += 0
        pose.position.y += 0
        pose.position.z = demo_constants.TABLE_HEIGHT + demo_constants.CUBE_EDGE_LENGTH
        return pose 
Example #10
Source File: vncc.py    From prpy with BSD 3-Clause "New" or "Revised" License
def _MsgToPose(msg):
        """
        Parse the ROS message to a 4x4 pose format
        @param msg The ros message containing a pose
        @return A 4x4 transformation matrix containing the pose
        as read from the message
        """
        #Get translation and rotation (from Euler angles)
        pose = transformations.euler_matrix(msg.roll*0.0,msg.pitch*0.0,msg.yaw*0.0) 
    
        pose[0,3] = msg.pt.x
        pose[1,3] = msg.pt.y
        pose[2,3] = msg.pt.z
        
        return pose
Example #11
Source File: simtrack.py    From prpy with BSD 3-Clause "New" or "Revised" License
def __init__(self, kinbody_path, detection_frame, world_frame,
                 service_namespace=None):
        """
        This initializes a simtrack detector.
        
        @param kinbody_path The path to the folder where kinbodies are stored
        @param detection_frame The TF frame of the camera
        @param world_frame The desired world TF frame
        @param service_namespace The namespace for the simtrack service (default: /simtrack)
        """
        import rospy
        import tf
        import tf.transformations as transformations
        # Initialize a new ros node if one has not already been created
        try:
            rospy.init_node('simtrack_detector', anonymous=True)
        except rospy.exceptions.ROSException:
            pass
            
        #For getting transforms in world frame
        if detection_frame is not None and world_frame is not None:
            self.listener = tf.TransformListener()
        else:
            self.listener = None
                
        if service_namespace is None:
            service_namespace='/simtrack'

        self.detection_frame = detection_frame
        self.world_frame = world_frame
        self.service_namespace = service_namespace
            
        self.kinbody_path = kinbody_path

        # A map of known objects that can be queried
        self.kinbody_to_query_map = {'fuze_bottle': 'fuze_bottle_visual',
                                     'pop_tarts': 'pop_tarts_visual'}
        self.query_to_kinbody_map = {'fuze_bottle_visual': 'fuze_bottle',
                                     'pop_tarts_visual': 'pop_tarts'} 
Example #12
Source File: gt_trajectories_from_bag.py    From mvsec with MIT License
def pose_to_tf(msg):
    q = np.array([msg.pose.orientation.x, msg.pose.orientation.y, msg.pose.orientation.z, msg.pose.orientation.w])
    T = tf.transformations.quaternion_matrix(q)
    T[0:3,3] = np.array([msg.pose.position.x, msg.pose.position.y, msg.pose.position.z])
    return (T) 
Example #13
Source File: tray.py    From AI-Robot-Challenge-Lab with MIT License
def get_tray_place_block_pose(self):
        #return self.gazebo_pose
        copygazebopose = copy.deepcopy(self.gazebo_pose)

        yoffset = -0.08 + 0.075 * len(self.blocks)
        copygazebopose.position.y -= yoffset
        copygazebopose.position.z += self.TRAY_SURFACE_THICKNESS

        zrot = tf.transformations.quaternion_from_euler(0, 0, -math.pi/2.0)

        trayorientation = [copygazebopose.orientation.x, copygazebopose.orientation.y, copygazebopose.orientation.z, copygazebopose.orientation.w]
        # oorient = [overhead_orientation.x,overhead_orientation.y,overhead_orientation.z,overhead_orientation.w]

        # resultingorient = tf.transformations.quaternion_multiply(cubeorientation, tf.transformations.quaternion_conjugate(oorient))
        resultingorient = tf.transformations.quaternion_multiply(trayorientation, zrot)

        # resultingorient = cubeorientation


        copygazebopose.orientation = Quaternion(x=resultingorient[0], y=resultingorient[1], z=resultingorient[2],
                                      w=resultingorient[3])


        rospy.logwarn("Tray Place location (objects %d, %lf): " % (len(self.blocks), yoffset) + str(copygazebopose))

        return copygazebopose 
Example #14
Source File: vslam.py    From pyrobot with MIT License
def _parse_pose_msg(self, pose):
        """
        Convert the ROS topic pose (geometry_msgs/Pose)
        into a translational vector
        and a rotation matrix

        :param pose: pose
        :type pose: geometry_msgs.Pose

        :return: (pos, ori)
                  pos: translational vector (shape: :math:`[3, ]`)
                  ori: rotation matrix (shape: :math:`[3, 3]`)
        :rtype: (numpy.ndarray, numpy.ndarray)
        """
        pos = np.array(
            [pose.pose.position.x, pose.pose.position.y, pose.pose.position.z]
        )
        quat = np.array(
            [
                pose.pose.orientation.x,
                pose.pose.orientation.y,
                pose.pose.orientation.z,
                pose.pose.orientation.w,
            ]
        )
        ori = tf.transformations.quaternion_matrix(quat)[:3, :3]
        return pos, ori 
Example #15
Source File: vslam.py    From pyrobot with MIT License
def get_link_transform(self, src, tgt):
        """
        Returns the latest transformation from the target_frame to the source frame,
        i.e., the transform of source frame w.r.t target frame. If the returned
        transform is applied to data, it will transform data in the source_frame into
        the target_frame.

        For more information, please refer to
        http://wiki.ros.org/tf/Overview/Using%20Published%20Transforms

        :param src: source frame
        :param tgt: target frame
        :type src: string
        :type tgt: string

        :returns: (trans, rot, T)
                  trans: translational vector (shape: :math:`[3,]`)
                  rot: rotation matrix (shape: :math:`[3, 3]`)
                  T: transformation matrix (shape: :math:`[4, 4]`)
        :rtype: (numpy.ndarray, numpy.ndarray, numpy.ndarray)
        """
        trans, quat = self._get_tf_transform(self.tf_listener, tgt, src)
        rot = tf.transformations.quaternion_matrix(quat)[:3, :3]
        T = np.eye(4)
        T[:3, :3] = rot
        T[:3, 3] = trans
        return trans, rot, T 
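The returned 4x4 matrix can be applied directly to homogeneous points: a point expressed in the source frame, multiplied by T, comes out expressed in the target frame, as the docstring states. A minimal sketch, assuming vslam is an instance of the class above and the frame names exist on the robot:

import numpy as np

trans, rot, T = vslam.get_link_transform("base_link", "map")
p_src = np.array([1.0, 0.0, 0.0, 1.0])  # homogeneous point in the source frame
p_tgt = np.dot(T, p_src)                # the same point expressed in the target frame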
Example #16
Source File: base.py    From pyrobot with MIT License
def _odometry_callback(self, msg, state_var):
        """Callback function to populate the state object."""
        orientation_q = msg.pose.pose.orientation
        orientation_list = [
            orientation_q.x,
            orientation_q.y,
            orientation_q.z,
            orientation_q.w,
        ]
        (roll, pitch, yaw) = tf.transformations.euler_from_quaternion(orientation_list)
        state = copy.deepcopy(getattr(self, state_var))
        state.update(msg.pose.pose.position.x, msg.pose.pose.position.y, yaw)
        setattr(self, state_var, state) 
Example #17
Source File: base_control_utils.py    From pyrobot with MIT License
def build_pose_msg(x, y, theta, frame):
    pose_stamped = PoseStamped()
    pose_stamped.header.frame_id = frame
    pose_stamped.pose.position.x = x
    pose_stamped.pose.position.y = y
    pose_stamped.pose.position.z = 0
    quat = tf.transformations.quaternion_from_euler(0, 0, theta)
    pose_stamped.pose.orientation.x = quat[0]
    pose_stamped.pose.orientation.y = quat[1]
    pose_stamped.pose.orientation.z = quat[2]
    pose_stamped.pose.orientation.w = quat[3]
    return pose_stamped 
Example #18
Source File: transformations.py    From ikpy with GNU General Public License v2.0
def list_to_m4x4(pose_list):
    pos = tf.transformations.translation_matrix(pose_list[0])
    quat = tf.transformations.quaternion_matrix(pose_list[1])
    return dot(pos, quat) 
Example #19
Source File: transformations.py    From ikpy with GNU General Public License v2.0
def m4x4_to_list(m4x4):
    return [
        tf.transformations.translation_from_matrix(m4x4),
        tf.transformations.quaternion_from_matrix(m4x4)
    ] 
Example #20
Source File: transformations.py    From ikpy with GNU General Public License v2.0
def multiply_transform(t1, t2):
    """
    Combines two transformations together
    The order is translation first, then rotation
    :param t1: [[x, y, z], [x, y, z, w]] or matrix 4x4
    :param t2: [[x, y, z], [x, y, z, w]] or matrix 4x4
    :return: The combination t1-t2 in the form [[x, y, z], [x, y, z, w]] or matrix 4x4
    """
    if _is_indexable(t1) and len(t1) == 2:
        return [
            list(quat_rotate(t1[1], t2[0]) + array(t1[0])),
            list(tf.transformations.quaternion_multiply(t1[1], t2[1]))
        ]
    else:
        return dot(t1, t2) 
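multiply_transform composes two (translation, quaternion) pairs using quaternion arithmetic only; it should agree with converting both operands to 4x4 matrices (Examples #18 and #19) and multiplying those, which is what multiply_transform_old in Example #4 does. An illustrative consistency check:

import math
import numpy as np
import tf.transformations

t1 = [[1.0, 0.0, 0.0], tf.transformations.quaternion_from_euler(0, 0, math.pi / 2)]
t2 = [[0.0, 1.0, 0.0], [0.0, 0.0, 0.0, 1.0]]  # identity rotation

via_quaternions = multiply_transform(t1, t2)
via_matrices = m4x4_to_list(np.dot(list_to_m4x4(t1), list_to_m4x4(t2)))
# Both yield a translation of approximately [0, 0, 0] and a 90-degree yaw,
# up to the usual quaternion sign ambiguity (q and -q are the same rotation).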
Example #21
Source File: mathutils.py    From AI-Robot-Challenge-Lab with MIT License
def homotransform_to_pose_msg(homotransform):
    trans = tf.transformations.translation_from_matrix(homotransform)
    quat = tf.transformations.quaternion_from_matrix(homotransform)
    return Pose(
        position=Point(x=trans[0], y=trans[1], z=trans[2]),
        orientation=Quaternion(x=quat[0], y=quat[1], z=quat[2], w=quat[3])) 
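homotransform_to_pose_msg is the inverse of get_homo_matrix_from_pose_msg from Example #2 (both come from the same mathutils.py); converting a pose to a matrix and back should reproduce it, up to quaternion sign. An illustrative round trip:

from geometry_msgs.msg import Pose, Point, Quaternion
import tf.transformations

q = tf.transformations.quaternion_from_euler(0, 0, 0.3)
pose = Pose(position=Point(x=0.5, y=-0.1, z=0.2),
            orientation=Quaternion(x=q[0], y=q[1], z=q[2], w=q[3]))
roundtrip = homotransform_to_pose_msg(get_homo_matrix_from_pose_msg(pose))
# roundtrip.position and roundtrip.orientation match pose (orientation possibly negated).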
Example #22
Source File: vehicle_position_validate.py    From fssim with MIT License
def get_track_init_pos(self):
        self.pose_init = Point(self.track_details['starting_pose_front_wing'])
        yaw = self.track_details['starting_pose_front_wing'][2]
        qt = tf.transformations.quaternion_from_euler(0.0, 0.0, yaw)
        return self.pose_init, Quaternion(*qt) 
Example #23
Source File: environment_estimation.py    From AI-Robot-Challenge-Lab with MIT License
def __init__(self):
        """
        """
        self.gazebo_trays = []
        self.gazebo_blocks = []

        self.trays = []
        self.blocks = []

        self.tf_broacaster = tf.TransformBroadcaster()
        self.tf_listener = tf.TransformListener()

        # initial simulated implementation
        pub = rospy.Subscriber('/gazebo/link_states', LinkStates, self.simulated_link_state_callback, queue_size=10)

        self.gazebo_world_to_ros_transform = None
        self.original_blocks_poses_ = None
        self.mutex = RLock()

        TABLE_HEIGHT = -0.12
        self.head_camera_helper = CameraHelper("head_camera", "base", TABLE_HEIGHT)
        self.bridge = CvBridge()
        self.block_pose_estimation_head_camera = None
        self.table = Table()

        self.hand_camera_helper = CameraHelper("right_hand_camera", "base", TABLE_HEIGHT)

        if demo_constants.is_real_robot():
            k = 3
            for i in xrange(k):
                for j in xrange(k):
                    q = tf.transformations.quaternion_from_euler(random.uniform(0, 2 * math.pi),
                                                                 random.uniform(0, 2 * math.pi),
                                                                 random.uniform(0, 2 * math.pi))

                    block = BlockState(id=str(len(self.gazebo_blocks)),
                                       pose=Pose(position=Point(x=0.45 + j * 0.15 + random.uniform(-1, 1) * 0.03,
                                                                y=-0.15 + i * 0.15 + random.uniform(-1, 1) * 0.03,
                                                                z=0.7725),
                                                 orientation=Quaternion(x=q[0], y=q[1], z=q[2], w=q[3])))

                    self.gazebo_blocks.append(block)