import pyrealsense2 as rs
import numpy as np
import cv2
import serial
import time
class RobotArmController:
    """Minimal serial-port wrapper for sending motion commands to the arm."""

    def __init__(self, port, baudrate=9600):
        self.ser = serial.Serial(port, baudrate)
        time.sleep(2)  # give the controller time to reset after the port opens

    def send_command(self, command):
        # Terminate with a newline so the controller can detect the end of the command.
        self.ser.write(f"{command}\n".encode())
        print(f"Sent: {command}")

    def move_to(self, x, y, z):
        # Command format depends on the arm's firmware; adjust if it expects
        # standard G-code axis letters such as "G0 X{x} Y{y} Z{z}".
        command = f"G0 {x} {y} {z}"
        self.send_command(command)

    def close(self):
        self.ser.close()
def get_depth_at_point(depth_frame, point):
    # Depth in meters at the given (x, y) pixel.
    return depth_frame.get_distance(point[0], point[1])
def main():
    # Configure and start the RealSense depth and color streams.
    pipeline = rs.pipeline()
    config = rs.config()
    config.enable_stream(rs.stream.depth, 640, 480, rs.format.z16, 30)
    config.enable_stream(rs.stream.color, 640, 480, rs.format.bgr8, 30)
    pipeline.start(config)

    controller = RobotArmController(port='COM5')

    camera_points = []
    arm_points = []

    depth_frame = None  # updated each loop iteration; read by the mouse callback

    def get_point(event, x, y, flags, param):
        # On left click, sample the depth at the clicked pixel, record the
        # camera-side point, and drive the arm to it.
        if event == cv2.EVENT_LBUTTONDOWN and depth_frame:
            depth_value = get_depth_at_point(depth_frame, (x, y))
            print(f"Depth at ({x}, {y}): {depth_value:.2f} meters")
            camera_points.append([x, y, depth_value])
            controller.move_to(x, y, depth_value)
            # For a meaningful calibration, the arm-side point should be the arm's
            # actual coordinates (e.g., read back from the controller), not a copy
            # of the camera-side point.
            arm_points.append([x, y, depth_value])

    # Create the window and register the callback once, instead of every frame.
    cv2.namedWindow('Color Image')
    cv2.setMouseCallback('Color Image', get_point)

    try:
        while True:
            frames = pipeline.wait_for_frames()
            depth_frame = frames.get_depth_frame()
            color_frame = frames.get_color_frame()
            if not depth_frame or not color_frame:
                continue

            depth_image = np.asanyarray(depth_frame.get_data())
            color_image = np.asanyarray(color_frame.get_data())
            cv2.imshow('Color Image', color_image)

            # Quit on 'q' or Esc.
            key = cv2.waitKey(1) & 0xFF
            if key == ord('q') or key == 27:
                cv2.destroyAllWindows()
                break

        if len(camera_points) > 0 and len(arm_points) > 0:
            camera_points = np.array(camera_points)
            arm_points = np.array(arm_points)

            # Least-squares fit of an affine map from camera points to arm points:
            # [x, y, depth, 1] @ X ≈ [arm_x, arm_y, arm_z]
            A = np.hstack((camera_points, np.ones((camera_points.shape[0], 1))))
            B = arm_points
            X, _, _, _ = np.linalg.lstsq(A, B, rcond=None)

            print("Hand-eye calibration matrix:")
            print(X)
    finally:
        controller.close()
        pipeline.stop()
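
# Optional helper, a minimal sketch assuming the least-squares fit in main():
# X maps an augmented camera point [u, v, depth, 1] to arm coordinates
# [x, y, z], so a new observation can be converted with one matrix product.
# The name camera_to_arm is illustrative, not part of any library API.
def camera_to_arm(calibration_matrix, u, v, depth):
    cam_point = np.array([u, v, depth, 1.0])
    return cam_point @ calibration_matrix  # -> array([arm_x, arm_y, arm_z])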
if __name__ == "__main__":
    main()