Python pybullet.getMatrixFromQuaternion() Examples

The following are 14 code examples of pybullet.getMatrixFromQuaternion(), drawn from open-source projects. You can go to the original project or source file by following the links above each example. You may also want to check out all available functions/classes of the module pybullet, or try the search function.
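Before the examples, a quick note on the return value: pybullet.getMatrixFromQuaternion() converts a quaternion [x, y, z, w] into a rotation matrix, returned as a flat tuple of 9 floats in row-major order. Reshaped to 3x3, its columns are the local frame axes expressed in world coordinates, and the last three elements (the third row) give the world up axis expressed in the local frame, which is what the is_fallen() checks below rely on. A minimal sketch, assuming only that pybullet and numpy are installed (the orientation is a placeholder value):

import numpy as np
import pybullet as p

p.connect(p.DIRECT)  # headless client; no GUI is needed for these helper calls

quat = p.getQuaternionFromEuler([0.52, 0.0, 0.0])  # placeholder: ~30 degrees of roll

rot = p.getMatrixFromQuaternion(quat)   # flat tuple of 9 floats, row-major
rot_mat = np.array(rot).reshape(3, 3)   # 3x3 rotation matrix

forward_in_world = rot_mat[:, 0]  # local x-axis in world coordinates (Example #11's forward vector)
up_in_world = rot_mat[:, 2]       # local z-axis in world coordinates (Example #11's up vector)
local_up = rot[6:]                # third row: world up expressed in the local frame (Examples #1, #3, #5, #12)

tilt_cosine = np.dot([0.0, 0.0, 1.0], local_up)  # 1.0 when upright, smaller as the body tilts

p.disconnect()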
Example #1
Source File: minitaur_gym_env.py    From soccer-matlab with BSD 2-Clause "Simplified" License
def is_fallen(self):
    """Decide whether the minitaur has fallen.

    If the angle between the base's up direction and the world's up direction
    is too large (their dot product is smaller than 0.85), or the base is very
    low to the ground (the height is smaller than 0.13 meter), the minitaur is
    considered fallen.

    Returns:
      Boolean value that indicates whether the minitaur has fallen.
    """
    orientation = self.minitaur.GetBaseOrientation()
    rot_mat = self._pybullet_client.getMatrixFromQuaternion(orientation)
    local_up = rot_mat[6:]
    pos = self.minitaur.GetBasePosition()
    return (np.dot(np.asarray([0, 0, 1]), np.asarray(local_up)) < 0.85 or
            pos[2] < 0.13) 
Example #2
Source File: minitaur_gym_env.py    From soccer-matlab with BSD 2-Clause "Simplified" License
def _reward(self):
    current_base_position = self.minitaur.GetBasePosition()
    forward_reward = current_base_position[0] - self._last_base_position[0]
    # Cap the forward reward if a cap is set.
    forward_reward = min(forward_reward, self._forward_reward_cap)
    # Penalty for sideways translation.
    drift_reward = -abs(current_base_position[1] - self._last_base_position[1])
    # Penalty for sideways rotation of the body.
    orientation = self.minitaur.GetBaseOrientation()
    rot_matrix = pybullet.getMatrixFromQuaternion(orientation)
    local_up_vec = rot_matrix[6:]
    shake_reward = -abs(np.dot(np.asarray([1, 1, 0]), np.asarray(local_up_vec)))
    energy_reward = -np.abs(
        np.dot(self.minitaur.GetMotorTorques(),
               self.minitaur.GetMotorVelocities())) * self._time_step
    objectives = [forward_reward, energy_reward, drift_reward, shake_reward]
    weighted_objectives = [
        o * w for o, w in zip(objectives, self._objective_weights)
    ]
    reward = sum(weighted_objectives)
    self._objectives.append(objectives)
    return reward 
Example #3
Source File: minitaur_env.py    From midlevel-reps with MIT License
def is_fallen(self):
        """Decide whether the minitaur has fallen.

        If the angle between the base's up direction and the world's up direction
        is too large (their dot product is smaller than 0.85), or the base is very
        low to the ground (the height is smaller than 0.13 meter), the minitaur is
        considered fallen.

        Returns:
          Boolean value that indicates whether the minitaur has fallen.
        """
        orientation = self.robot.GetBaseOrientation()
        rot_mat = pybullet.getMatrixFromQuaternion(orientation)
        local_up = rot_mat[6:]
        pos = self.robot.GetBasePosition()
        # The fall check is disabled in this environment; the method always
        # reports that the robot has not fallen.
        # return (np.dot(np.asarray([0, 0, 1]), np.asarray(local_up)) < 0.85 or
        #         pos[2] < 0.13)
        return False 
Example #4
Source File: rex_gym_env.py    From rex-gym with Apache License 2.0
def _reward(self):
        current_base_position = self.rex.GetBasePosition()
        # forward direction
        forward_reward = -current_base_position[0] + self._last_base_position[0]
        # Cap the forward reward if a cap is set.
        forward_reward = min(forward_reward, self._forward_reward_cap)
        # Penalty for sideways translation.
        drift_reward = -abs(current_base_position[1] - self._last_base_position[1])
        # Penalty for sideways rotation of the body.
        orientation = self.rex.GetBaseOrientation()
        rot_matrix = pybullet.getMatrixFromQuaternion(orientation)
        local_up_vec = rot_matrix[6:]
        shake_reward = -abs(np.dot(np.asarray([1, 1, 0]), np.asarray(local_up_vec)))
        energy_reward = -np.abs(
            np.dot(self.rex.GetMotorTorques(),
                   self.rex.GetMotorVelocities())) * self._time_step
        objectives = [forward_reward, energy_reward, drift_reward, shake_reward]
        weighted_objectives = [o * w for o, w in zip(objectives, self._objective_weights)]
        reward = sum(weighted_objectives)
        self._objectives.append(objectives)
        return reward 
Example #5
Source File: minitaur_evaluate.py    From soccer-matlab with BSD 2-Clause "Simplified" License
def is_fallen():
  global minitaur
  orientation = minitaur.getBaseOrientation()
  rotMat = p.getMatrixFromQuaternion(orientation)
  localUp = rotMat[6:]
  return np.dot(np.asarray([0, 0, 1]), np.asarray(localUp)) < 0 
Example #6
Source File: kukaGymEnv.py    From soccer-matlab with BSD 2-Clause "Simplified" License
def getExtendedObservation(self):
     self._observation = self._kuka.getObservation()
     gripperState  = p.getLinkState(self._kuka.kukaUid,self._kuka.kukaGripperIndex)
     gripperPos = gripperState[0]
     gripperOrn = gripperState[1]
     blockPos,blockOrn = p.getBasePositionAndOrientation(self.blockUid)

     invGripperPos,invGripperOrn = p.invertTransform(gripperPos,gripperOrn)
     gripperMat = p.getMatrixFromQuaternion(gripperOrn)
     dir0 = [gripperMat[0],gripperMat[3],gripperMat[6]]
     dir1 = [gripperMat[1],gripperMat[4],gripperMat[7]]
     dir2 = [gripperMat[2],gripperMat[5],gripperMat[8]]

     gripperEul =  p.getEulerFromQuaternion(gripperOrn)
     #print("gripperEul")
     #print(gripperEul)
     blockPosInGripper,blockOrnInGripper = p.multiplyTransforms(invGripperPos,invGripperOrn,blockPos,blockOrn)
     projectedBlockPos2D =[blockPosInGripper[0],blockPosInGripper[1]]
     blockEulerInGripper = p.getEulerFromQuaternion(blockOrnInGripper)
     #print("projectedBlockPos2D")
     #print(projectedBlockPos2D)
     #print("blockEulerInGripper")
     #print(blockEulerInGripper)

     #we return the relative x,y position and euler angle of block in gripper space
     blockInGripperPosXYEulZ =[blockPosInGripper[0],blockPosInGripper[1],blockEulerInGripper[2]]

     #p.addUserDebugLine(gripperPos,[gripperPos[0]+dir0[0],gripperPos[1]+dir0[1],gripperPos[2]+dir0[2]],[1,0,0],lifeTime=1)
     #p.addUserDebugLine(gripperPos,[gripperPos[0]+dir1[0],gripperPos[1]+dir1[1],gripperPos[2]+dir1[2]],[0,1,0],lifeTime=1)
     #p.addUserDebugLine(gripperPos,[gripperPos[0]+dir2[0],gripperPos[1]+dir2[1],gripperPos[2]+dir2[2]],[0,0,1],lifeTime=1)

     self._observation.extend(list(blockInGripperPosXYEulZ))
     return self._observation 
Example #7
Source File: bullet_physics_engine.py    From NTP-vat-release with MIT License
def mat33_from_quat(quat):
        quat = list(quat)
        mat33 = p.getMatrixFromQuaternion(quat)
        return np.reshape(mat33, [3, 3]) 
Example #8
Source File: bullet_physics_engine.py    From NTP-vat-release with MIT License
def mat33_from_euler(euler):
        euler = list(euler)
        quat = p.getQuaternionFromEuler(euler)
        mat33 = p.getMatrixFromQuaternion(quat)
        return np.reshape(mat33, [3, 3]) 
Example #9
Source File: bullet_physics_engine.py    From NTP-vat-release with MIT License
def pos_in_frame(pos, frame):
        frame_xyz = frame[0]
        frame_rpy = frame[1]
        quat = p.getQuaternionFromEuler(frame_rpy)
        mat33 = p.getMatrixFromQuaternion(quat)
        mat33 = np.reshape(mat33, [3, 3])
        pos_in_frame = frame_xyz + np.dot(pos, mat33.T)
        return pos_in_frame 
Example #10
Source File: bullet_physics_engine.py    From NTP-vat-release with MIT License
def get_body_mat33(body):
        _, quat = p.getBasePositionAndOrientation(body)
        mat33 = p.getMatrixFromQuaternion(quat)
        return np.reshape(mat33, [3, 3]) 
Example #11
Source File: camera.py    From qibullet with Apache License 2.0
def _getCameraImage(self):
        """
        INTERNAL METHOD, Computes the OpenGL virtual camera image. The
        resolution and the projection matrix have to be computed before calling
        this method, or it will crash

        Returns:
            camera_image - The camera image of the OpenGL virtual camera
        """
        _, _, _, _, pos_world, q_world = pybullet.getLinkState(
            self.robot_model,
            self.camera_link.getParentIndex(),
            computeForwardKinematics=False,
            physicsClientId=self.physics_client)

        rotation = pybullet.getMatrixFromQuaternion(q_world)
        forward_vector = [rotation[0], rotation[3], rotation[6]]
        up_vector = [rotation[2], rotation[5], rotation[8]]

        camera_target = [
            pos_world[0] + forward_vector[0] * 10,
            pos_world[1] + forward_vector[1] * 10,
            pos_world[2] + forward_vector[2] * 10]

        view_matrix = pybullet.computeViewMatrix(
            pos_world,
            camera_target,
            up_vector,
            physicsClientId=self.physics_client)

        with self.resolution_lock:
            camera_image = pybullet.getCameraImage(
                self.resolution.width,
                self.resolution.height,
                view_matrix,
                self.projection_matrix,
                renderer=pybullet.ER_BULLET_HARDWARE_OPENGL,
                flags=pybullet.ER_NO_SEGMENTATION_MASK,
                physicsClientId=self.physics_client)

        return camera_image 
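The docstring above says the projection matrix and resolution must be set before _getCameraImage() is called. In qibullet that setup lives elsewhere in the camera class; the snippet below is only a hedged sketch of what such a setup could look like using pybullet's built-in helper, with the field of view, resolution, and clipping distances chosen arbitrarily rather than taken from qibullet:

import pybullet

pybullet.connect(pybullet.DIRECT)  # headless client so the helper can be called standalone

# Hypothetical stand-in for the projection-matrix setup that _getCameraImage()
# expects; the numbers are assumptions, not values taken from qibullet.
projection_matrix = pybullet.computeProjectionMatrixFOV(
    fov=60.0,              # vertical field of view in degrees
    aspect=640.0 / 480.0,  # image width divided by image height
    nearVal=0.1,           # near clipping plane, in meters
    farVal=10.0)           # far clipping plane, in meters

pybullet.disconnect()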
Example #12
Source File: rex_gym_env.py    From rex-gym with Apache License 2.0
def is_fallen(self):
        """Decide whether Rex has fallen.

        If the angle between the base's up direction and the world's up direction
        is too large (their dot product is smaller than 0.85) or the base is very
        low to the ground (the height is smaller than 0.13 meter), rex is
        considered fallen.

        Returns:
          Boolean value that indicates whether rex has fallen.
        """
        orientation = self.rex.GetBaseOrientation()
        rot_mat = self._pybullet_client.getMatrixFromQuaternion(orientation)
        local_up = rot_mat[6:]
        return np.dot(np.asarray([0, 0, 1]), np.asarray(local_up)) < 0.85 
Example #13
Source File: helloworld_panda.py    From pybullet-robot-envs with GNU Lesser General Public License v2.1
def render(robot):
    pos, rot, _, _, _, _ = p.getLinkState(robot.robot_id, linkIndex=robot.end_eff_idx, computeForwardKinematics=True)
    rot_matrix = p.getMatrixFromQuaternion(rot)
    rot_matrix = np.array(rot_matrix).reshape(3, 3)

    # camera intrinsics; the 640x480 image size matches the principal point (cx, cy)
    width = 640
    height = 480
    fx, fy = 596.6278076171875, 596.6278076171875
    cx, cy = 311.98663330078125, 236.76170349121094
    near, far = 0.1, 10

    camera_vector = rot_matrix.dot((0, 0, 1))
    up_vector = rot_matrix.dot((0, -1, 0))

    camera_eye_pos = np.array(pos)
    camera_target_position = camera_eye_pos + 0.2 * camera_vector

    view_matrix = p.computeViewMatrix(camera_eye_pos, camera_target_position, up_vector)

    proj_matrix = (2.0 * fx / width, 0.0, 0.0, 0.0,
                   0.0, 2.0 * fy / height, 0.0, 0.0,
                   1.0 - 2.0 * cx / width, 2.0 * cy / height - 1.0, (far + near) / (near - far), -1.0,
                   0.0, 0.0, 2.0 * far * near / (near - far), 0.0)

    p.getCameraImage(width=width, height=height, viewMatrix=view_matrix, projectionMatrix=proj_matrix,
                     renderer=p.ER_BULLET_HARDWARE_OPENGL)  # renderer=self._p.ER_TINY_RENDERER) 
Example #14
Source File: turtlebot_pybullet.py    From SocialRobot with Apache License 2.0
def get_image(cam_pos, cam_orientation):
    """
    Arguments
        cam_pos: camera position
        cam_orientation: camera orientation in quaternion
    """
    width = 160
    height = 120
    fov = 90
    aspect = width / height
    near = 0.001
    far = 5

    if use_maximal_coordinates:
        # cam_orientation is unreliable when bt_rigid_body (maximal
        # coordinates) is enabled, so look at (0, 0, 0) instead;
        # this does not affect performance
        cam_pos_offset = cam_pos + np.array([0.0, 0.0, 0.3])
        target_pos = np.array([0.0, 0.0, 0.0])
    else:
        # camera pos, look at, camera up direction
        rot_matrix = p.getMatrixFromQuaternion(cam_orientation)
        # offset to base pos
        cam_pos_offset = cam_pos + np.dot(
            np.array(rot_matrix).reshape(3, 3), np.array([0.1, 0.0, 0.3]))
        target_pos = cam_pos_offset + np.dot(
            np.array(rot_matrix).reshape(3, 3), np.array([-1.0, 0.0, 0.0]))
    # compute view matrix
    view_matrix = p.computeViewMatrix(cam_pos_offset, target_pos, [0, 0, 1])
    projection_matrix = p.computeProjectionMatrixFOV(fov, aspect, near, far)

    # Get depth values using the OpenGL renderer
    if enable_open_gl_rendering:
        w, h, rgb, depth, seg = p.getCameraImage(
            width,
            height,
            view_matrix,
            projection_matrix,
            shadow=True,
            renderer=p.ER_BULLET_HARDWARE_OPENGL)
    else:
        w, h, rgb, depth, seg = p.getCameraImage(
            width,
            height,
            view_matrix,
            projection_matrix,
            shadow=True,
            renderer=p.ER_TINY_RENDERER)

    # To recover metric depth or a segmentation image, uncomment:
    # depth_buffer = np.reshape(depth, [height, width])
    # depth_m = far * near / (far - (far - near) * depth_buffer)
    # seg_img = np.reshape(seg, [height, width]) * 1. / 255.
    return rgb