
While trying to set up basic color detection in my ROS project I have gotten stuck. I have two Python scripts: "programa.py", which is my main robot program, and "camara.py", which is my OpenCV color detection script.

Right now, while camara.py is running, I can change which color is detected by publishing a String message:

rostopic pub -1 /detect_object_color std_msgs/String "purple"

I would like to somehow add the option to send this message from my "programa.py" while that script is running (at line 4 of the main function).
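Conceptually, what I am after is something along these lines inside main() (send_color is just a placeholder name for whatever mechanism ends up doing this):

  example.reach_named_position("home")
  send_color("blue")   # placeholder: switch camara.py to blue detection from within programa.py
  raw_input()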

This is the code for both programs. programa.py:

import sys
import tf
import time
import rospy
import camara
import moveit_commander
import moveit_msgs.msg
import geometry_msgs.msg
import cv2
from math import pi
from std_srvs.srv import Empty
from std_msgs.msg import Float64MultiArray, MultiArrayDimension
from moveit_commander import MoveItCommanderException
from moveit_msgs.msg import Grasp, GripperTranslation, PlaceLocation
from cv_bridge import CvBridge
from sensor_msgs.msg import Image




class ExampleMoveItTrajectories(object):
  """ExampleMoveItTrajectories"""
  def __init__(self):

    # Initialize the node
    super(ExampleMoveItTrajectories, self).__init__()
    moveit_commander.roscpp_initialize(sys.argv)
    rospy.init_node('example_move_it_trajectories')

    self.is_gripper_present = rospy.get_param(rospy.get_namespace() + "is_gripper_present", True)


    # Create the MoveItInterface necessary objects
    arm_group_name = "arm"
    self.robot = moveit_commander.RobotCommander("robot_description")
    print(self.robot.get_group_names())
    self.scene = moveit_commander.PlanningSceneInterface(ns=rospy.get_namespace())
    self.arm_group = moveit_commander.MoveGroupCommander(arm_group_name, ns=rospy.get_namespace())
    self.display_trajectory_publisher = rospy.Publisher(rospy.get_namespace() + 'move_group/display_planned_path',
                                                   moveit_msgs.msg.DisplayTrajectory,
                                                   queue_size=20)

    if self.is_gripper_present:
      gripper_group_name = "hand"
      self.gripper_group = moveit_commander.MoveGroupCommander(gripper_group_name, ns=rospy.get_namespace())

    rospy.loginfo("Initializing node in namespace " + rospy.get_namespace())

  def reach_named_position(self, target):
    arm_group = self.arm_group
    
    # Going to one of those targets
    rospy.loginfo("Going to named target " + target)
    # Set the target
    arm_group.set_named_target(target)
    # Plan the trajectory
    planned_path1 = arm_group.plan()
    # Execute the trajectory and block while it's not finished
    arm_group.execute(planned_path1, wait=True)

  def reach_joint_angles(self, tolerance):
    arm_group = self.arm_group

    # Get the current joint positions
    joint_positions = arm_group.get_current_joint_values()
    rospy.loginfo("Printing current joint positions before movement :")
    for p in joint_positions: rospy.loginfo(p)

    # Set the goal joint tolerance

    self.arm_group.set_goal_joint_tolerance(tolerance)
    

    # Set the joint target configuration
    joint_positions[0] = 0
    joint_positions[1] = -pi/4
    joint_positions[2] = 0
    joint_positions[3] = -pi/2
    joint_positions[4] = 0
    joint_positions[5] = pi/3
    joint_positions[6] = 0
    arm_group.set_joint_value_target(joint_positions)
    
    # Plan and execute in one command
    arm_group.go(wait=True)

    # Show joint positions after movement
    new_joint_positions = arm_group.get_current_joint_values()
    rospy.loginfo("Printing current joint positions after movement :")
    for p in new_joint_positions: rospy.loginfo(p)

  def get_cartesian_pose(self):
    arm_group = self.arm_group

    
    # Get the current pose and display it
    pose = arm_group.get_current_pose()
    rospy.loginfo("Actual cartesian pose is : ")
    rospy.loginfo(pose.pose)

    return pose.pose
    

  def reach_cartesian_pose(self, pose, tolerance, constraints):
    arm_group = self.arm_group

        # Allow some leeway in position (meters) and orientation (radians)
    
    print("end_effector_link")
    print(arm_group.get_end_effector_link())
    # set_pose_reference_frame needs a frame name argument to take effect;
    # a bare attribute access here does nothing:
    # arm_group.set_pose_reference_frame

    gripper_group = self.gripper_group

    print(arm_group.get_end_effector_link())
    arm_group.set_end_effector_link("panda_TCP")

    print(arm_group.get_end_effector_link())

    # Set the tolerance
    arm_group.set_goal_position_tolerance(tolerance)

    # Set the trajectory constraint if one is specified
    if constraints is not None:
      #import pdb; pdb.set_trace()
      arm_group.set_path_constraints(constraints)

    # Get the current Cartesian Position
    arm_group.set_pose_target(pose)

    # Plan and execute
    rospy.loginfo("Planning and going to the Cartesian Pose")
    arm_group.go(wait=True)

  def reach_gripper_position(self, relative_position):
    gripper_group = self.gripper_group
    
    # We only have to move this joint because all others are mimic!
    gripper_joint1 = self.robot.get_joint("panda_finger_joint1")
    gripper_joint2 = self.robot.get_joint("panda_finger_joint2")
    gripper_max_absolute_pos = gripper_joint1.max_bound()
    print(gripper_max_absolute_pos)
    gripper_joint1.move(relative_position * gripper_max_absolute_pos, wait=True)
    gripper_joint2.move(relative_position * gripper_max_absolute_pos, wait=True)

    # crear comando para posiciones de la pizan
  def send_gripper_closed_command_moveit(self):
    gripper_joints_closed = [0.0, 0.0]

    gripper_group = self.gripper_group

    gripper_group.set_joint_value_target(gripper_joints_closed)
    plan = gripper_group.plan()
    gripper_group.execute(plan, wait=True)

  def send_gripper_open_command_moveit(self):
    gripper_joints_open = [0.04, 0.04]

    gripper_group = self.gripper_group

    gripper_group.set_joint_value_target(gripper_joints_open)
    plan = gripper_group.plan()
    gripper_group.execute(plan, wait=True)


  def send_gripper_command(self):
    pub = rospy.Publisher('/panda_hand_controller/command',
                          Float64MultiArray, queue_size=1)

    msg = Float64MultiArray()
    msg.layout.dim = [MultiArrayDimension('', 2, 1)]

    
    for i in range(100):
      msg.data = [0.04, 0.04]
      pub.publish(msg)
      rospy.sleep(1)
      msg.data = [0.00, 0.00]
      pub.publish(msg)
      rospy.sleep(1)

  def set_max_velocity_scaling_factor(self, value):
    """ Set a scaling factor for optionally reducing the maximum joint velocity. Allowed values are in (0,1]. """
    if value > 0 and value <= 1:
      self.arm_group.set_max_velocity_scaling_factor(value)
    else:
      raise MoveItCommanderException("Expected value in the range from 0 to 1 for scaling factor")
 
    

def main():


  #camara.listener()

  example = ExampleMoveItTrajectories()

  

  # HOME
  example.reach_named_position("home")

  #INTRODUCE HERE THE COLOR CHANGE MESSAGE

  rospy.loginfo("PULSAR PARA COMENZAR PROGRAMA")
  rospy.loginfo("DETECCION AZUL")
  raw_input()

  #PINZA ABIERTA
  example.send_gripper_open_command_moveit()

  #POSICIONAMIENTO CAMARA pieza1
  actual_pose = example.get_cartesian_pose()

  roll= 0.0
  pitch = pi
  yaw = -pi/2 

  quaternion= tf.transformations.quaternion_from_euler(roll, pitch, yaw)

  actual_pose.position.x = 0.30
  actual_pose.position.y = 0.6
  actual_pose.position.z = 1.2
  actual_pose.orientation.x = quaternion[0]
  actual_pose.orientation.y = quaternion[1]
  actual_pose.orientation.z = quaternion[2]
  actual_pose.orientation.w = quaternion[3]
  example.reach_cartesian_pose(pose=actual_pose, tolerance=0.001, constraints=None)
  

  #POSICIONAMIENTO GRASPING pieza 1
 
  actual_pose = example.get_cartesian_pose()

  roll= 0.0
  pitch = pi
  yaw = -pi/2 

  quaternion= tf.transformations.quaternion_from_euler(roll, pitch, yaw)
  actual_pose.position.x = 0.30
  actual_pose.position.y = 0.6
  actual_pose.position.z = 1.05
  actual_pose.orientation.x = quaternion[0]
  actual_pose.orientation.y = quaternion[1]
  actual_pose.orientation.z = quaternion[2]
  actual_pose.orientation.w = quaternion[3]
  example.reach_cartesian_pose(pose=actual_pose, tolerance=0.001, constraints=None)

  #PINZA CERRADA
  example.send_gripper_closed_command_moveit()
  #time.sleep(5)

 #POSICIONAMIENTO PIEZA 1 ARRIBA
  actual_pose = example.get_cartesian_pose()

  roll= 0.0
  pitch = pi
  yaw = -pi/2 

  quaternion= tf.transformations.quaternion_from_euler(roll, pitch, yaw)

  actual_pose.position.x = 0.3
  actual_pose.position.y = 0.6
  actual_pose.position.z = 1.2
  actual_pose.orientation.x = quaternion[0]
  actual_pose.orientation.y = quaternion[1]
  actual_pose.orientation.z = quaternion[2]
  actual_pose.orientation.w = quaternion[3]
  example.reach_cartesian_pose(pose=actual_pose, tolerance=0.001, constraints=None)

  #POSICIONAMIENTO BASURA1 
  actual_pose = example.get_cartesian_pose()

  roll= 0.0
  pitch = pi
  yaw = -pi/2 

  quaternion= tf.transformations.quaternion_from_euler(roll, pitch, yaw)

  actual_pose.position.x = 0.5
  actual_pose.position.y = 0.0
  actual_pose.position.z = 1.4
  actual_pose.orientation.x = quaternion[0]
  actual_pose.orientation.y = quaternion[1]
  actual_pose.orientation.z = quaternion[2]
  actual_pose.orientation.w = quaternion[3]
  example.reach_cartesian_pose(pose=actual_pose, tolerance=0.001, constraints=None)
  
  
   #PINZA ABIERTA
  example.send_gripper_open_command_moveit()

  # HOME
  example.reach_named_position("home")

  rospy.loginfo("CAMBIO DETECCION COLOR ROJO")
  raw_input()

  #POSICIONAMIENTO CAMARA pieza2
  actual_pose = example.get_cartesian_pose()

  roll= 0.0
  pitch = pi
  yaw = -pi/2 

  quaternion= tf.transformations.quaternion_from_euler(roll, pitch, yaw)

  actual_pose.position.x = -0.30
  actual_pose.position.y = 0.6
  actual_pose.position.z = 1.2
  actual_pose.orientation.x = quaternion[0]
  actual_pose.orientation.y = quaternion[1]
  actual_pose.orientation.z = quaternion[2]
  actual_pose.orientation.w = quaternion[3]
  example.reach_cartesian_pose(pose=actual_pose, tolerance=0.001, constraints=None)
  

  #POSICIONAMIENTO GRASPING pieza 2
 
  actual_pose = example.get_cartesian_pose()

  roll= 0.0
  pitch = pi
  yaw = -pi/2 

  quaternion= tf.transformations.quaternion_from_euler(roll, pitch, yaw)
  actual_pose.position.x = -0.30
  actual_pose.position.y = 0.6
  actual_pose.position.z = 1.05
  actual_pose.orientation.x = quaternion[0]
  actual_pose.orientation.y = quaternion[1]
  actual_pose.orientation.z = quaternion[2]
  actual_pose.orientation.w = quaternion[3]
  example.reach_cartesian_pose(pose=actual_pose, tolerance=0.001, constraints=None)

  #PINZA CERRADA
  example.send_gripper_closed_command_moveit()
  #time.sleep(5)

 #POSICIONAMIENTO PIEZA 2 ARRIBA
  actual_pose = example.get_cartesian_pose()

  roll= 0.0
  pitch = pi
  yaw = -pi/2 

  quaternion= tf.transformations.quaternion_from_euler(roll, pitch, yaw)

  actual_pose.position.x = -0.3
  actual_pose.position.y = 0.6
  actual_pose.position.z = 1.2
  actual_pose.orientation.x = quaternion[0]
  actual_pose.orientation.y = quaternion[1]
  actual_pose.orientation.z = quaternion[2]
  actual_pose.orientation.w = quaternion[3]
  example.reach_cartesian_pose(pose=actual_pose, tolerance=0.001, constraints=None)
  
  #HOME
  example.reach_named_position("home")

  #POSICIONAMIENTO BASURA pieza 2 
  actual_pose = example.get_cartesian_pose()

  roll= 0.0
  pitch = pi
  yaw = -pi/2 

  quaternion= tf.transformations.quaternion_from_euler(roll, pitch, yaw)

  actual_pose.position.x = 0.5
  actual_pose.position.y = 0.0
  actual_pose.position.z = 1.4
  actual_pose.orientation.x = quaternion[0]
  actual_pose.orientation.y = quaternion[1]
  actual_pose.orientation.z = quaternion[2]
  actual_pose.orientation.w = quaternion[3]
  example.reach_cartesian_pose(pose=actual_pose, tolerance=0.001, constraints=None)
  
  
  #SOLTAR PIEZA 2 
  example.send_gripper_open_command_moveit()

  
  # HOME
  example.reach_named_position("home")

  rospy.loginfo("DETECCION MORADO")
  raw_input()

  #POSICIONAMIENTO CAMARA pieza3
  actual_pose = example.get_cartesian_pose()

  roll= 0.0
  pitch = pi
  yaw = -pi/2 

  quaternion= tf.transformations.quaternion_from_euler(roll, pitch, yaw)

  actual_pose.position.x = -0.30
  actual_pose.position.y = 0.7
  actual_pose.position.z = 1.2
  actual_pose.orientation.x = quaternion[0]
  actual_pose.orientation.y = quaternion[1]
  actual_pose.orientation.z = quaternion[2]
  actual_pose.orientation.w = quaternion[3]
  example.reach_cartesian_pose(pose=actual_pose, tolerance=0.001, constraints=None)
  

  #POSICIONAMIENTO GRASPING pieza 3
 
  actual_pose = example.get_cartesian_pose()

  roll= 0.0
  pitch = pi
  yaw = -pi/2 

  quaternion= tf.transformations.quaternion_from_euler(roll, pitch, yaw)
  actual_pose.position.x = -0.30
  actual_pose.position.y = 0.7
  actual_pose.position.z = 1.05
  actual_pose.orientation.x = quaternion[0]
  actual_pose.orientation.y = quaternion[1]
  actual_pose.orientation.z = quaternion[2]
  actual_pose.orientation.w = quaternion[3]
  example.reach_cartesian_pose(pose=actual_pose, tolerance=0.001, constraints=None)

  #PINZA CERRADA
  example.send_gripper_closed_command_moveit()
  #time.sleep(5)

 #POSICIONAMIENTO PIEZA 3 ARRIBA
  actual_pose = example.get_cartesian_pose()

  roll= 0.0
  pitch = pi
  yaw = -pi/2 

  quaternion= tf.transformations.quaternion_from_euler(roll, pitch, yaw)

  actual_pose.position.x = -0.3
  actual_pose.position.y = 0.7
  actual_pose.position.z = 1.2
  actual_pose.orientation.x = quaternion[0]
  actual_pose.orientation.y = quaternion[1]
  actual_pose.orientation.z = quaternion[2]
  actual_pose.orientation.w = quaternion[3]
  example.reach_cartesian_pose(pose=actual_pose, tolerance=0.001, constraints=None)
  
  #HOME
  example.reach_named_position("home")

  #POSICIONAMIENTO BASURA pieza 3 
  actual_pose = example.get_cartesian_pose()

  roll= 0.0
  pitch = pi
  yaw = -pi/2 

  quaternion= tf.transformations.quaternion_from_euler(roll, pitch, yaw)

  actual_pose.position.x = -0.5
  actual_pose.position.y = 0.0
  actual_pose.position.z = 1.4
  actual_pose.orientation.x = quaternion[0]
  actual_pose.orientation.y = quaternion[1]
  actual_pose.orientation.z = quaternion[2]
  actual_pose.orientation.w = quaternion[3]
  example.reach_cartesian_pose(pose=actual_pose, tolerance=0.001, constraints=None)
  
  
  #SOLTAR PIEZA 3 
  example.send_gripper_open_command_moveit()

  # HOME
  example.reach_named_position("home")

  rospy.loginfo("DETECCION VERDE")
  raw_input()

  #POSICIONAMIENTO CAMARA pieza4
  actual_pose = example.get_cartesian_pose()

  roll= 0.0
  pitch = pi
  yaw = -pi/2 

  quaternion= tf.transformations.quaternion_from_euler(roll, pitch, yaw)

  actual_pose.position.x = 0.30
  actual_pose.position.y = 0.7
  actual_pose.position.z = 1.2
  actual_pose.orientation.x = quaternion[0]
  actual_pose.orientation.y = quaternion[1]
  actual_pose.orientation.z = quaternion[2]
  actual_pose.orientation.w = quaternion[3]
  example.reach_cartesian_pose(pose=actual_pose, tolerance=0.001, constraints=None)
  

  #POSICIONAMIENTO GRASPING pieza 4
 
  actual_pose = example.get_cartesian_pose()

  roll= 0.0
  pitch = pi
  yaw = -pi/2 

  quaternion= tf.transformations.quaternion_from_euler(roll, pitch, yaw)
  actual_pose.position.x = 0.30
  actual_pose.position.y = 0.7
  actual_pose.position.z = 1.05
  actual_pose.orientation.x = quaternion[0]
  actual_pose.orientation.y = quaternion[1]
  actual_pose.orientation.z = quaternion[2]
  actual_pose.orientation.w = quaternion[3]
  example.reach_cartesian_pose(pose=actual_pose, tolerance=0.001, constraints=None)

  #PINZA CERRADA
  example.send_gripper_closed_command_moveit()
  #time.sleep(5)

 #POSICIONAMIENTO PIEZA 4 ARRIBA
  actual_pose = example.get_cartesian_pose()

  roll= 0.0
  pitch = pi
  yaw = -pi/2 

  quaternion= tf.transformations.quaternion_from_euler(roll, pitch, yaw)

  actual_pose.position.x = 0.3
  actual_pose.position.y = 0.7
  actual_pose.position.z = 1.2
  actual_pose.orientation.x = quaternion[0]
  actual_pose.orientation.y = quaternion[1]
  actual_pose.orientation.z = quaternion[2]
  actual_pose.orientation.w = quaternion[3]
  example.reach_cartesian_pose(pose=actual_pose, tolerance=0.001, constraints=None)
  
  #HOME
  example.reach_named_position("home")
  
  #POSICIONAMIENTO BASURA pieza 4 
  actual_pose = example.get_cartesian_pose()

  roll= 0.0
  pitch = pi
  yaw = -pi/2 

  quaternion= tf.transformations.quaternion_from_euler(roll, pitch, yaw)

  actual_pose.position.x = -0.5
  actual_pose.position.y = 0.0
  actual_pose.position.z = 1.4
  actual_pose.orientation.x = quaternion[0]
  actual_pose.orientation.y = quaternion[1]
  actual_pose.orientation.z = quaternion[2]
  actual_pose.orientation.w = quaternion[3]
  example.reach_cartesian_pose(pose=actual_pose, tolerance=0.001, constraints=None)
  
  
  #SOLTAR PIEZA 4 
  example.send_gripper_open_command_moveit()

  # HOME
  example.reach_named_position("home")


  #example.send_gripper_command()

  #rospy.loginfo("Press any key to open the gripper sub example")
  #raw_input()
  #example.reach_gripper_position(0)

  #rospy.loginfo("Press any key to bring the gripper to half-closed sub example")
  #raw_input()
  #example.reach_gripper_position(1.0)

  #rospy.loginfo("Press any key to start Named Target Vertical sub example")
 # raw_input()
 # example.reach_named_position("up")

 # rospy.loginfo("Press any key to start Reach Joint Angles sub example")
 # raw_input()
 # example.reach_joint_angles(tolerance=0.01) #rad

  

  #rospy.loginfo("Press any key to start Reach Cartesian Pose sub example")
  #raw_input()
  #actual_pose = example.get_cartesian_pose()
  #actual_pose.position.x = 0.0
  #actual_pose.position.y = -1
  #actual_pose.position.z = 1.02
  #example.reach_cartesian_pose(pose=actual_pose, tolerance=0.01, constraints=None)

  
  
  if example.is_gripper_present:

    rospy.loginfo("Press any key to start Reach Cartesian Pose With Constraints sub example")
    raw_input()
    
    # Get actual pose
    actual_pose = example.get_cartesian_pose()
    actual_pose.position.z -= 0.005
    
    # Orientation constraint (we want the end effector to stay the same orientation)
    constraints = moveit_msgs.msg.Constraints()
    orientation_constraint = moveit_msgs.msg.OrientationConstraint()
    orientation_constraint.orientation = actual_pose.orientation
    constraints.orientation_constraints.append(orientation_constraint)

    # Send the goal
    example.reach_cartesian_pose(pose=actual_pose, tolerance=0.01, constraints=constraints)
    


if __name__ == '__main__':
  main()

camara.py:

from __future__ import print_function

import rospy
import cv2
import numpy as np
from std_msgs.msg import String
from sensor_msgs.msg import Image
from geometry_msgs.msg import Point
from cv_bridge import CvBridge


bridge = CvBridge()
pub=None

lowerb = np.array([0, 100, 100])
upperb = np.array([0, 255, 255])

def callback_color(data):
    global lowerb, upperb
    if data.data == "red":
        lowerb[0] = 0
        upperb[0] = 10
    elif data.data == "green":
        lowerb[0] = 40
        upperb[0] = 80
    elif data.data == "blue":
        lowerb[0] = 120
        upperb[0] = 130
    elif data.data == "purple":
        lowerb[0] = 140
        upperb[0] = 180

def callback(data):
    cv_image = bridge.imgmsg_to_cv2(data, desired_encoding='passthrough')
    cv_image_hsv = cv2.cvtColor(cv_image, cv2.COLOR_BGR2HSV)
    mask = cv2.inRange(cv_image_hsv, lowerb, upperb)

    rows, cols = np.where(mask != 0)

    point = Point()
    if rows.shape[0]:
        point.x = np.mean(rows)
        point.y = np.mean(cols)
    pub.publish(point)
    print(point)

    cv2.imshow("bgr", cv_image)
    cv2.imshow("mask", mask)
    cv2.waitKey(1)

def listener():
    global pub
    rospy.init_node('listener', anonymous=True)
    pub = rospy.Publisher('posicion_objeto', Point, queue_size=10)
    rospy.Subscriber("/panda/camera1/image_raw", Image, callback)
    rospy.Subscriber("/detect_object_color", String, callback_color)
    rospy.spin()



if __name__ == '__main__':
    listener()

Any tips or suggestions would be greatly appreciated. Thanks in advance.


1 Answer


You can write a publisher for this.

Define a publisher in your init:

color_pub = rospy.Publisher("/detect_object_color", String, queue_size=10)

and issue the publish command with your color name wherever and whenever you want to publish:

color_pub.publish("purple")
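Putting the two pieces together, a minimal sketch of how this could look in programa.py is shown below. This assumes camara.py's listener() is already running as a separate node, and the attribute name color_pub is only an example:

# programa.py, alongside the other imports
from std_msgs.msg import String

# inside ExampleMoveItTrajectories.__init__, after rospy.init_node(...)
self.color_pub = rospy.Publisher("/detect_object_color", String, queue_size=10)
rospy.sleep(1.0)  # give the subscriber in camara.py a moment to connect, or the first message may be dropped

# inside main(), at the "#INTRODUCE HERE THE COLOR CHANGE MESSAGE" marker
example.color_pub.publish("blue")

This is the programmatic equivalent of the rostopic pub command above: callback_color in camara.py receives the String and updates the HSV bounds used by the mask.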