Python rospy.Duration() Examples
The following are 30 code examples of rospy.Duration(). You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may also want to check out all available functions/classes of the module rospy, or try the search function.
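All of the examples below build rospy.Duration values for timeouts, trajectory timing, or marker lifetimes. As a quick refresher before the examples, here is a minimal sketch of the core API (the node name is illustrative, not taken from any project below):

import rospy

rospy.init_node('duration_demo')  # illustrative node name

# Two equivalent ways to build a half-second Duration:
# from float seconds, or from explicit secs/nsecs fields.
half_second = rospy.Duration(0.5)
also_half_second = rospy.Duration(secs=0, nsecs=500000000)

# Durations add to rospy.Time, which is how many examples below
# construct deadlines and timeouts.
deadline = rospy.Time.now() + rospy.Duration(2.0)

# Convert back to plain numbers for logging or comparison.
rospy.loginfo("%.1f sec is %d ns", half_second.to_sec(), half_second.to_nsec())

# Durations are also accepted by rospy.sleep() and rospy.Timer().
rospy.sleep(rospy.Duration(0.1))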
Example #1
Source File: pose_manager.py From naoqi_bridge with BSD 3-Clause "New" or "Revised" License | 7 votes

def __init__(self):
    # ROS initialization:
    rospy.init_node('pose_manager')

    self.poseLibrary = dict()
    self.readInPoses()
    self.poseServer = actionlib.SimpleActionServer("body_pose", BodyPoseAction,
                                                   execute_cb=self.executeBodyPose,
                                                   auto_start=False)
    self.trajectoryClient = actionlib.SimpleActionClient("joint_trajectory", JointTrajectoryAction)
    if self.trajectoryClient.wait_for_server(rospy.Duration(3.0)):
        try:
            rospy.wait_for_service("stop_walk_srv", timeout=2.0)
            self.stopWalkSrv = rospy.ServiceProxy("stop_walk_srv", Empty)
        except rospy.ROSException:
            rospy.logwarn("stop_walk_srv not available, pose_manager will not stop the walker "
                          "before executing a trajectory. "
                          "This is normal if there is no nao_walker running.")
            self.stopWalkSrv = None
        self.poseServer.start()
        rospy.loginfo("pose_manager running, offering poses: %s", self.poseLibrary.keys())
    else:
        rospy.logfatal("Could not connect to required \"joint_trajectory\" action server, "
                       "is the nao_controller node running?")
        rospy.signal_shutdown("Required component missing")
Example #2
Source File: PR2.py From visual_dynamics with MIT License | 7 votes

def follow_timed_joint_trajectory(self, positions, velocities, times):
    jt = tm.JointTrajectory()
    jt.joint_names = self.joint_names
    jt.header.stamp = rospy.Time.now()

    for (position, velocity, time) in zip(positions, velocities, times):
        jtp = tm.JointTrajectoryPoint()
        jtp.positions = position
        jtp.velocities = velocity
        jtp.time_from_start = rospy.Duration(time)
        jt.points.append(jtp)

    self.controller_pub.publish(jt)
    rospy.loginfo("%s: starting %.2f sec traj", self.controller_name, times[-1])
    self.pr2.start_thread(JustWaitThread(times[-1]))
Example #3
Source File: pose_manager.py From naoqi_bridge with BSD 3-Clause "New" or "Revised" License | 6 votes

def parseXapPoses(self, xaplibrary):
    try:
        poses = xapparser.getpostures(xaplibrary)
    except RuntimeError as re:
        rospy.logwarn("Error while parsing the XAP file: %s" % str(re))
        return

    for name, pose in poses.items():
        trajectory = JointTrajectory()
        trajectory.joint_names = pose.keys()

        point = JointTrajectoryPoint()
        point.time_from_start = Duration(2.0)  # hardcoded duration!
        point.positions = pose.values()

        trajectory.points = [point]
        self.poseLibrary[name] = trajectory
Example #4
Source File: position_action_client.py From ggcnn_kinova_grasping with BSD 3-Clause "New" or "Revised" License | 6 votes

def move_to_position(position, orientation):
    """Send a cartesian goal to the action server."""
    global position_client
    if position_client is None:
        init()

    goal = kinova_msgs.msg.ArmPoseGoal()
    goal.pose.header = std_msgs.msg.Header(frame_id='m1n6s200_link_base')
    goal.pose.pose.position = geometry_msgs.msg.Point(
        x=position[0], y=position[1], z=position[2])
    goal.pose.pose.orientation = geometry_msgs.msg.Quaternion(
        x=orientation[0], y=orientation[1], z=orientation[2], w=orientation[3])

    position_client.send_goal(goal)

    if position_client.wait_for_result(rospy.Duration(10.0)):
        return position_client.get_result()
    else:
        position_client.cancel_all_goals()
        print('        the cartesian action timed-out')
        return None
Example #5
Source File: transforms.py From ggcnn_kinova_grasping with BSD 3-Clause "New" or "Revised" License | 6 votes

def convert_pose(pose, from_frame, to_frame):
    """
    Convert a pose or transform between frames using tf.
        pose        -> A geometry_msgs.msg/Pose that defines the robot's position and orientation in a reference_frame
        from_frame  -> A string that defines the original reference_frame of the robot
        to_frame    -> A string that defines the desired reference_frame of the robot to convert to
    """
    global tfBuffer, listener

    # Create a Listener object to receive and buffer transformations
    trans = None
    try:
        trans = tfBuffer.lookup_transform(to_frame, from_frame, rospy.Time(0), rospy.Duration(1.0))
    except (tf2_ros.LookupException, tf2_ros.ConnectivityException, tf2_ros.ExtrapolationException) as e:
        print(e)
        rospy.logerr('FAILED TO GET TRANSFORM FROM %s to %s' % (to_frame, from_frame))
        return None
Example #6
Source File: gripper_action_client.py From ggcnn_kinova_grasping with BSD 3-Clause "New" or "Revised" License | 6 votes

def set_finger_positions(finger_positions):
    """Send a gripper goal to the action server."""
    global gripper_client
    global finger_maxTurn

    finger_positions[0] = min(finger_maxTurn, finger_positions[0])
    finger_positions[1] = min(finger_maxTurn, finger_positions[1])

    goal = kinova_msgs.msg.SetFingersPositionGoal()
    goal.fingers.finger1 = float(finger_positions[0])
    goal.fingers.finger2 = float(finger_positions[1])
    # The MICO arm has only two fingers, but the same action definition is used
    if len(finger_positions) < 3:
        goal.fingers.finger3 = 0.0
    else:
        goal.fingers.finger3 = float(finger_positions[2])

    gripper_client.send_goal(goal)
    if gripper_client.wait_for_result(rospy.Duration(5.0)):
        return gripper_client.get_result()
    else:
        gripper_client.cancel_all_goals()
        # note: the original called rospy.WARN, which is a log level, not a function
        rospy.logwarn('        the gripper action timed-out')
        return None
Example #7
Source File: PR2.py From visual_dynamics with MIT License | 6 votes

def goto_joint_positions(self, positions_goal):
    positions_cur = self.get_joint_positions()
    assert len(positions_goal) == len(positions_cur)

    duration = norm((r_[positions_goal] - r_[positions_cur]) / self.vel_limits, ord=inf)

    jt = tm.JointTrajectory()
    jt.joint_names = self.joint_names
    jt.header.stamp = rospy.Time.now()

    jtp = tm.JointTrajectoryPoint()
    jtp.positions = positions_goal
    jtp.velocities = zeros(len(positions_goal))
    jtp.time_from_start = rospy.Duration(duration)
    jt.points = [jtp]

    self.controller_pub.publish(jt)
    rospy.loginfo("%s: starting %.2f sec traj", self.controller_name, duration)
    self.pr2.start_thread(JustWaitThread(duration))
Example #8
Source File: joint_trajectory_action.py From intera_sdk with Apache License 2.0 | 6 votes

def _get_bezier_point(self, b_matrix, idx, t, cmd_time, dimensions_dict):
    pnt = JointTrajectoryPoint()
    pnt.time_from_start = rospy.Duration(cmd_time)
    num_joints = b_matrix.shape[0]
    pnt.positions = [0.0] * num_joints
    if dimensions_dict['velocities']:
        pnt.velocities = [0.0] * num_joints
    if dimensions_dict['accelerations']:
        pnt.accelerations = [0.0] * num_joints
    for jnt in range(num_joints):
        b_point = bezier.bezier_point(b_matrix[jnt, :, :, :], idx, t)
        # Positions at specified time
        pnt.positions[jnt] = b_point[0]
        # Velocities at specified time
        if dimensions_dict['velocities']:
            pnt.velocities[jnt] = b_point[1]
        # Accelerations at specified time
        if dimensions_dict['accelerations']:
            pnt.accelerations[jnt] = b_point[-1]
    return pnt
Example #9
Source File: joint_trajectory_action.py From intera_sdk with Apache License 2.0 | 6 votes

def _get_minjerk_point(self, m_matrix, idx, t, cmd_time, dimensions_dict):
    pnt = JointTrajectoryPoint()
    pnt.time_from_start = rospy.Duration(cmd_time)
    num_joints = m_matrix.shape[0]
    pnt.positions = [0.0] * num_joints
    if dimensions_dict['velocities']:
        pnt.velocities = [0.0] * num_joints
    if dimensions_dict['accelerations']:
        pnt.accelerations = [0.0] * num_joints
    for jnt in range(num_joints):
        m_point = minjerk.minjerk_point(m_matrix[jnt, :, :, :], idx, t)
        # Positions at specified time
        pnt.positions[jnt] = m_point[0]
        # Velocities at specified time
        if dimensions_dict['velocities']:
            pnt.velocities[jnt] = m_point[1]
        # Accelerations at specified time
        if dimensions_dict['accelerations']:
            pnt.accelerations[jnt] = m_point[-1]
    return pnt
Example #10
Source File: io_interface.py From intera_sdk with Apache License 2.0 | 6 votes

def revalidate(self, timeout, invalidate_state=True, invalidate_config=True):
    """
    Invalidate the state and config topics, then wait up to timeout
    seconds for them to become valid again.
    Return true if both the state and config topic data are valid.
    """
    if invalidate_state:
        self.invalidate_state()
    if invalidate_config:
        self.invalidate_config()
    timeout_time = rospy.Time.now() + rospy.Duration(timeout)
    while not self.is_state_valid() and not rospy.is_shutdown():
        rospy.sleep(0.1)
        if timeout_time < rospy.Time.now():
            rospy.logwarn("Timed out waiting for node interface valid...")
            return False
    return True
Example #11
Source File: calibrate_arm.py From intera_sdk with Apache License 2.0 | 6 votes

def __init__(self, limb="right"):
    """
    @param limb: Limb to run CalibrateArm on arm side.
    """
    self._limb = limb
    self._client = actionlib.SimpleActionClient('/calibration_command',
                                                CalibrationCommandAction)
    # Waits until the action server has started up and started
    # listening for goals.
    server_up = self._client.wait_for_server(rospy.Duration(10.0))
    if not server_up:
        rospy.logerr("Timed out waiting for Calibration"
                     " Server to connect. Check your ROS networking"
                     " and/or reboot the robot.")
        rospy.signal_shutdown("Timed out waiting for Action Server")
        sys.exit(1)
Example #12
Source File: joint_trajectory_file_playback.py From intera_sdk with Apache License 2.0 | 6 votes

def wait(self):
    """
    Waits for and verifies trajectory execution result.
    """
    # Create a timeout for our trajectory execution:
    # total time trajectory expected for trajectory execution plus a buffer
    last_time = self.goal.trajectory.points[-1].time_from_start.to_sec()
    time_buffer = rospy.get_param(self._param_ns + 'goal_time', 0.0) + 1.5
    timeout = rospy.Duration(self._slow_move_offset + last_time + time_buffer)

    finish = self._limb_client.wait_for_result(timeout)
    result = (self._limb_client.get_result().error_code == 0)

    # Verify result
    if all([finish, result]):
        return True
    else:
        msg = ("Trajectory action failed or did not finish before "
               "timeout/interrupt.")
        rospy.logwarn(msg)
        return False
Example #13
Source File: simtrack.py From prpy with BSD 3-Clause "New" or "Revised" License | 6 votes

def _LocalToWorld(self, pose):
    """
    Transform a pose from local frame to world frame
    @param pose The 4x4 transformation matrix containing the pose to transform
    @return The 4x4 transformation matrix describing the pose in world frame
    """
    import rospy

    # Get pose w.r.t world frame
    self.listener.waitForTransform(self.world_frame, self.detection_frame,
                                   rospy.Time(), rospy.Duration(10))
    t, r = self.listener.lookupTransform(self.world_frame, self.detection_frame,
                                         rospy.Time(0))

    # Get relative transform between frames
    offset_to_world = numpy.matrix(transformations.quaternion_matrix(r))
    offset_to_world[0, 3] = t[0]
    offset_to_world[1, 3] = t[1]
    offset_to_world[2, 3] = t[2]

    # Compose with pose to get pose in world frame
    result = numpy.array(numpy.dot(offset_to_world, pose))
    return result
Example #14
Source File: joint_trajectory_file_playback.py From intera_sdk with Apache License 2.0 | 6 votes

def _add_point(self, positions, side, time):
    """
    Appends trajectory with new point

    @param positions: joint positions
    @param side: limb to command point
    @param time: time from start for point in seconds
    """
    # Creates a point in trajectory with time_from_start and positions
    point = JointTrajectoryPoint()
    point.positions = copy(positions)
    point.time_from_start = rospy.Duration(time)
    if side == self.limb:
        self.goal.trajectory.points.append(point)
    elif self.gripper and side == self.gripper_name:
        self.grip.trajectory.points.append(point)
Example #15
Source File: costar_hyper_prediction.py From costar_plan with Apache License 2.0 | 6 votes

def verify_update_rate(update_time_remaining, update_rate=10, minimum_update_rate_fraction_allowed=0.1, info=''):
    """ Make sure at least 10% of time is remaining when updates are performed.

    We are converting to nanoseconds here since that is the unit
    in which all reasonable rates are expected to be measured.
    """
    update_duration_sec = 1.0 / update_rate
    minimum_allowed_remaining_time = update_duration_sec * minimum_update_rate_fraction_allowed
    min_remaining_duration = rospy.Duration(minimum_allowed_remaining_time)

    if update_time_remaining < min_remaining_duration:
        rospy.logwarn_throttle(
            1.0,
            'Not maintaining requested update rate, there may be problems with the goal pose predictions!\n'
            '    Update rate is: ' + str(update_rate) + 'Hz, Duration is ' + str(update_duration_sec) + ' sec\n' +
            '    Minimum time allowed time remaining is: ' + str(minimum_allowed_remaining_time) + ' sec\n' +
            '    Actual remaining on this update was: ' + str(float(str(update_time_remaining)) / 1.0e9) + ' sec\n' +
            '    ' + info)
Example #16
Source File: run.py From costar_plan with Apache License 2.0 | 6 votes

def verify_update_rate(update_time_remaining, update_rate=10, minimum_update_rate_fraction_allowed=0.1, info=''):
    """ Make sure at least 10% of time is remaining when updates are performed.

    We are converting to nanoseconds here since that is the unit
    in which all reasonable rates are expected to be measured.
    """
    update_duration_sec = 1.0 / update_rate
    minimum_allowed_remaining_time = update_duration_sec * minimum_update_rate_fraction_allowed
    min_remaining_duration = rospy.Duration(minimum_allowed_remaining_time)

    if update_time_remaining < min_remaining_duration:
        rospy.logwarn_throttle(
            1.0,
            'Not maintaining requested update rate, there may be gaps in the data log!\n'
            '    Update rate is: ' + str(update_rate) + 'Hz, Duration is ' + str(update_duration_sec) + ' sec\n' +
            '    Minimum time allowed time remaining is: ' + str(minimum_allowed_remaining_time) + ' sec\n' +
            '    Actual remaining on this update was: ' + str(float(str(update_time_remaining)) / 1.0e9) + ' sec\n' +
            '    ' + info)
Example #17
Source File: estimate_depth.py From baxter_demos with Apache License 2.0 | 6 votes

def solve_goal_pose(self, centroid):
    # Project centroid into 3D coordinates
    center = (centroid[0] - self.camera_x/2, centroid[1] - self.camera_y/2)
    vec = numpy.array(self.camera_model.projectPixelTo3dRay(center))

    # Scale it by the IR reading
    d_cam = (self.ir_reading - self.min_ir_depth - self.object_height) * vec
    d_cam = numpy.concatenate((d_cam, numpy.ones(1)))
    # print "Camera vector:", d_cam

    # Now transform into the world frame
    self.tf_listener.waitForTransform('/base', '/' + self.limb + '_hand_camera',
                                      rospy.Time(), rospy.Duration(4))
    (trans, rot) = self.tf_listener.lookupTransform('/base', '/' + self.limb + '_hand_camera',
                                                    rospy.Time())
    camera_to_base = tf.transformations.compose_matrix(
        translate=trans,
        angles=tf.transformations.euler_from_quaternion(rot))

    d_base = numpy.dot(camera_to_base, d_cam)
    return d_base[0:3]
Example #18
Source File: common.py From baxter_demos with Apache License 2.0 | 6 votes

def __init__(self, limb):
    self.waiting = False
    self.jointnames = ['s0', 's1', 'e0', 'e1', 'w0', 'w1', 'w2']

    ns = 'robot/limb/' + limb + '/'
    self._client = actionlib.SimpleActionClient(
        ns + "follow_joint_trajectory",
        FollowJointTrajectoryAction,
    )
    self._goal = FollowJointTrajectoryGoal()
    server_up = self._client.wait_for_server(timeout=rospy.Duration(10.0))
    if not server_up:
        rospy.logerr("Timed out waiting for Joint Trajectory"
                     " Action Server to connect. Start the action server"
                     " before running example.")
        rospy.signal_shutdown("Timed out waiting for Action Server")
        sys.exit(1)
    self.clear(limb)
Example #19
Source File: get_goal_poses.py From baxter_demos with Apache License 2.0 | 6 votes

def solve_goal_point(self, centroid):
    # Find the centroid in the point cloud
    x = int(centroid[0])
    y = int(centroid[1])
    depth = self.get_depth(x, y)
    print(depth)

    # Get pixel points in camera units
    v = self.camera_model.projectPixelTo3dRay((x, y))
    d_cam = numpy.concatenate((depth * numpy.array(v), numpy.ones(1))).reshape((4, 1))

    # TODO: is this the right frame transform?
    self.tf_listener.waitForTransform('/base', '/camera_depth_optical_frame',
                                      rospy.Time(), rospy.Duration(4))
    (trans, rot) = self.tf_listener.lookupTransform('/base', '/camera_depth_optical_frame',
                                                    rospy.Time())
    camera_to_base = tf.transformations.compose_matrix(
        translate=trans,
        angles=tf.transformations.euler_from_quaternion(rot))

    d_base = numpy.dot(camera_to_base, d_cam)
    d_base[2] -= params['object_height'] / 2.0
    return d_base[0:3]
Example #20
Source File: visual_servo.py From baxter_demos with Apache License 2.0 | 6 votes

def servo_xy(self):
    print("translating in XY at speed: %s" % self.inc)
    d = self.centroid - self.goal_pos

    self.tf_listener.waitForTransform('/' + self.limb + '_hand_camera', '/base',
                                      rospy.Time(), rospy.Duration(4.0))
    (trans, rot) = self.tf_listener.lookupTransform('/' + self.limb + '_hand_camera',
                                                    '/base', rospy.Time(0))
    R = tf.transformations.quaternion_matrix(rot)[:3, :3]

    d = numpy.concatenate((d, numpy.zeros(1)))
    d_rot = numpy.dot(R, d)
    direction = self.inc * d_rot / numpy.linalg.norm(d_rot)

    if not self.outOfRange():
        direction[2] = self.inc
    else:
        direction[2] = 0
    direction *= self.inc / numpy.linalg.norm(direction)
    # print direction

    self.command_ik(direction)
Example #21
Source File: tf_helpers.py From mvp_grasp with BSD 3-Clause "New" or "Revised" License | 6 votes

def convert_pose(pose, from_frame, to_frame):
    """
    Convert a pose or transform between frames using tf.
        pose        -> A geometry_msgs.msg/Pose that defines the robot's position and orientation in a reference_frame
        from_frame  -> A string that defines the original reference_frame of the robot
        to_frame    -> A string that defines the desired reference_frame of the robot to convert to
    """
    global tfBuffer, listener

    if tfBuffer is None or listener is None:
        _init_tf()

    try:
        trans = tfBuffer.lookup_transform(to_frame, from_frame, rospy.Time(0), rospy.Duration(1.0))
    except (tf2_ros.LookupException, tf2_ros.ConnectivityException, tf2_ros.ExtrapolationException) as e:
        print(e)
        rospy.logerr('FAILED TO GET TRANSFORM FROM %s to %s' % (to_frame, from_frame))
        return None
Example #22
Source File: vncc.py From prpy with BSD 3-Clause "New" or "Revised" License | 6 votes

def _LocalToWorld(self, pose):
    """
    Transform a pose from local frame to world frame
    @param pose The 4x4 transformation matrix containing the pose to transform
    @return The 4x4 transformation matrix describing the pose in world frame
    """
    # Get pose w.r.t world frame
    self.listener.waitForTransform(self.world_frame, self.detection_frame,
                                   rospy.Time(), rospy.Duration(10))
    t, r = self.listener.lookupTransform(self.world_frame, self.detection_frame,
                                         rospy.Time(0))

    # Get relative transform between frames
    offset_to_world = numpy.matrix(transformations.quaternion_matrix(r))
    offset_to_world[0, 3] = t[0]
    offset_to_world[1, 3] = t[1]
    offset_to_world[2, 3] = t[2]

    # Compose with pose to get pose in world frame
    result = numpy.array(numpy.dot(offset_to_world, pose))
    return result
Example #23
Source File: joint_trajectory_client.py From intera_sdk with Apache License 2.0 | 6 votes

def __init__(self, limb, joint_names):
    self._joint_names = joint_names
    ns = 'robot/limb/' + limb + '/'
    self._client = actionlib.SimpleActionClient(
        ns + "follow_joint_trajectory",
        FollowJointTrajectoryAction,
    )
    self._goal = FollowJointTrajectoryGoal()
    # goal_time_tolerance is a duration field, so build it with
    # rospy.Duration (the original source used rospy.Time(0.1))
    self._goal_time_tolerance = rospy.Duration(0.1)
    self._goal.goal_time_tolerance = self._goal_time_tolerance
    server_up = self._client.wait_for_server(timeout=rospy.Duration(10.0))
    if not server_up:
        rospy.logerr("Timed out waiting for Joint Trajectory"
                     " Action Server to connect. Start the action server"
                     " before running example.")
        rospy.signal_shutdown("Timed out waiting for Action Server")
        sys.exit(1)
    self.clear(limb)
Example #24
Source File: joystick_interface.py From niryo_one_ros with GNU General Public License v3.0 | 5 votes

def publish_joint_trajectory(self, positions, duration):
    msg = JointTrajectory()
    msg.header.stamp = rospy.Time.now()
    msg.joint_names = ['joint_1', 'joint_2', 'joint_3', 'joint_4', 'joint_5', 'joint_6']

    point = JointTrajectoryPoint()
    point.positions = positions
    point.time_from_start = rospy.Duration(duration)

    msg.points = [point]
    self.joint_trajectory_publisher.publish(msg)
Example #25
Source File: utils.py From Attentional-PointNet with GNU General Public License v3.0 | 5 votes

def rosMarker(loc, idx, color="green", dur=0.2):
    val = 0.1
    if color == "green":
        val = 0.9
    # print(loc[3])
    quat = quaternion_from_euler(0, 0, loc[3])

    marker = Marker()
    # marker.header.frame_id = "map"
    marker.header.frame_id = "/zoe/base_link"
    marker.type = marker.CUBE
    marker.action = marker.ADD
    marker.scale.x = 2.5
    marker.scale.y = 5
    marker.scale.z = 2
    if len(loc) > 4:
        marker.scale.x = loc[5]
        marker.scale.y = loc[6]
        marker.scale.z = loc[4]
    marker.color.a = 0.4
    marker.color.r = 1 - val
    marker.color.g = val
    marker.color.b = 0.2
    marker.pose.orientation.x = quat[0]
    marker.pose.orientation.y = quat[1]
    marker.pose.orientation.z = quat[2]
    marker.pose.orientation.w = quat[3]
    marker.pose.position.x = loc[0]
    marker.pose.position.y = loc[1]
    marker.pose.position.z = loc[2]
    marker.lifetime = rospy.Duration(dur)
    marker.id = idx
    return marker
Example #26
Source File: joystick_interface.py From niryo_one_ros with GNU General Public License v3.0 | 5 votes

def __init__(self):
    self.joy_enabled = False
    self.joint_mode = JointMode()

    joy_subscriber = rospy.Subscriber('joy', Joy, self.callback_joy)

    self.joystick_server = rospy.Service(
        '/niryo_one/joystick_interface/enable', SetInt,
        self.callback_enable_joystick)

    self.joystick_enabled_publisher = rospy.Publisher(
        '/niryo_one/joystick_interface/is_enabled', Bool, queue_size=1)

    rospy.Timer(rospy.Duration(2), self.publish_joystick_enabled)
Example #27
Source File: sequence_autorun.py From niryo_one_ros with GNU General Public License v3.0 | 5 votes

def __init__(self):
    self.sequence_autorun_status_file = rospy.get_param("~sequence_autorun_status_file")
    self.sequence_autorun_status_file = os.path.expanduser(self.sequence_autorun_status_file)
    self.lock = Lock()
    self.enabled, self.mode, self.sequence_ids = self.read_sequence_autorun_status()
    self.activated = False
    self.sequence_execution_index = -1
    self.flag_send_robot_to_home_position = False
    self.cancel_sequence = False
    self.is_sequence_running = False

    self.calibration_needed = None
    self.calibration_in_progress = None
    self.hardware_status_subscriber = rospy.Subscriber(
        '/niryo_one/hardware_status', HardwareStatus, self.sub_hardware_status)

    self.learning_mode_on = None
    self.learning_mode_subscriber = rospy.Subscriber(
        '/niryo_one/learning_mode', Bool, self.sub_learning_mode)

    # Wait for sequence action server to finish setup
    self.sequence_action_client = actionlib.SimpleActionClient(
        'niryo_one/sequences/execute', SequenceAction)
    self.sequence_action_client.wait_for_server()

    self.sequence_autorun_status_publisher = rospy.Publisher(
        '/niryo_one/sequences/sequence_autorun_status', SequenceAutorunStatus, queue_size=10)

    self.timer = rospy.Timer(rospy.Duration(3.0), self.publish_sequence_autorun_status)

    self.set_sequence_autorun_server = rospy.Service(
        '/niryo_one/sequences/set_sequence_autorun', SetSequenceAutorun,
        self.callback_set_sequence_autorun)

    self.trigger_sequence_autorun = rospy.Service(
        '/niryo_one/sequences/trigger_sequence_autorun', SetInt,
        self.callback_trigger_sequence_autorun)

    self.sequence_autorun_thread = Thread(name="worker", target=self.execute_sequence_autorun_thread)
    self.sequence_autorun_thread.setDaemon(True)
    self.sequence_autorun_thread.start()

    rospy.loginfo('Init Sequence autorun OK')
Example #28
Source File: niryo_one_api.py From niryo_one_ros with GNU General Public License v3.0 | 5 votes

def execute_action(self, action_name, action_msg_type, goal):
    client = actionlib.SimpleActionClient(action_name, action_msg_type)

    # Connect to server
    if not client.wait_for_server(rospy.Duration(self.action_connection_timeout)):
        raise NiryoOneException('Action Server is not up : ' + str(action_name))

    # Send goal and check response
    client.send_goal(goal)
    if not client.wait_for_result(timeout=rospy.Duration(self.action_execute_timeout)):
        client.cancel_goal()
        client.stop_tracking_goal()
        raise NiryoOneException('Action Server timeout : ' + str(action_name))

    goal_state = client.get_state()
    response = client.get_result()

    if goal_state != GoalStatus.SUCCEEDED:
        client.stop_tracking_goal()

    if goal_state == GoalStatus.REJECTED:
        raise NiryoOneException('Goal has been rejected : ' + str(response.message))
    elif goal_state == GoalStatus.ABORTED:
        raise NiryoOneException('Goal has been aborted : ' + str(response.message))
    elif goal_state != GoalStatus.SUCCEEDED:
        raise NiryoOneException('Error when processing goal : ' + str(response.message))

    return response.message
Example #29
Source File: marker_tools.py From RoboND-Perception-Exercises with MIT License | 5 votes

def make_label(text, position, id=0, duration=5.0, color=[1.0, 1.0, 1.0]):
    """
    Helper function for generating visualization markers.

    Args:
        text (str): Text string to be displayed.
        position (list): List containing [x, y, z] positions.
        id (int): Integer identifying the label.
        duration (float): How long the label will be displayed for, in seconds.
        color (list): List of label color floats from 0 to 1 [r, g, b].

    Returns:
        Marker: A text view marker which can be published to RViz.
    """
    marker = Marker()
    marker.header.frame_id = '/world'
    marker.id = id
    marker.type = marker.TEXT_VIEW_FACING
    marker.text = text
    marker.action = marker.ADD
    marker.scale.x = 0.05
    marker.scale.y = 0.05
    marker.scale.z = 0.05
    marker.color.a = 1.0
    marker.color.r = color[0]
    marker.color.g = color[1]
    marker.color.b = color[2]
    marker.lifetime = rospy.Duration(duration)
    marker.pose.orientation.w = 1.0
    marker.pose.position.x = position[0]
    marker.pose.position.y = position[1]
    marker.pose.position.z = position[2]
    return marker
Example #30
Source File: trackManager.py From crazyflieROS with GNU General Public License v2.0 | 5 votes

def getKinectPoint(self):
    now = rospy.Time.now()
    try:
        self.sub_tf.waitForTransform("/world", "/camera_depth_optical_frame",
                                     now, rospy.Duration(0.1))
        (t, r) = self.sub_tf.lookupTransform("/world", "/camera_depth_optical_frame", now)
        return list(t), list(r)
    except (tf.LookupException, tf.ConnectivityException, tf.ExtrapolationException):
        rospy.logwarn('Could not get ground kinect position in the world')