In this lesson we learn how to use OpenCV on the Jetson Xavier NX to track an object of interest with two cameras, each mounted on its own pan/tilt servo bracket. The system tracks based on the HSV color space, but the same basic setup could be used with other object detection algorithms. In this project we are using the Jetson Xavier NX, which you can pick up HERE. You will also need two of the bracket/servo kits, which you can get HERE, and two Raspberry Pi Version 2 cameras, available HERE.
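Before digging into the full two-camera program, it helps to see the core tracking idea in isolation: convert a frame to the HSV color space, threshold it into a binary mask, and take the bounding box of the largest contour that survives the mask. The short sketch below is just that idea on its own; it assumes a generic webcam at index 0 and a hard-coded HSV range standing in for the trackbar values, whereas the full listing that follows uses the nvarguscamerasrc GStreamer pipelines for the two CSI cameras and drives the servos from the box center.

import cv2
import numpy as np

cam=cv2.VideoCapture(0)                # assumption: any webcam at index 0
lower=np.array([100,160,150])          # example HSV lower bound (bluish target)
upper=np.array([116,255,255])          # example HSV upper bound
while True:
    ret, frame=cam.read()
    if not ret:
        break
    hsv=cv2.cvtColor(frame,cv2.COLOR_BGR2HSV)
    mask=cv2.inRange(hsv,lower,upper)  # white where the pixel matches the color
    contours,_=cv2.findContours(mask,cv2.RETR_EXTERNAL,cv2.CHAIN_APPROX_SIMPLE)
    if contours:
        cnt=max(contours,key=cv2.contourArea)   # largest blob of the target color
        x,y,w,h=cv2.boundingRect(cnt)
        cv2.rectangle(frame,(x,y),(x+w,y+h),(0,255,255),2)
    cv2.imshow('track',frame)
    if cv2.waitKey(1)==ord('q'):
        break
cam.release()
cv2.destroyAllWindows()

Here is the complete program for the two-camera, two-bracket tracker.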
import cv2
import numpy as np
import time
from adafruit_servokit import ServoKit
print(cv2.__version__)
timeMark=time.time()
dtFIL=0

def nothing(x):
    pass

# Trackbars for tuning the HSV threshold while the program runs
cv2.namedWindow('TrackBars')
cv2.moveWindow('TrackBars',1320,0)
cv2.createTrackbar('hueLower', 'TrackBars',100,179,nothing)
cv2.createTrackbar('hueUpper', 'TrackBars',116,179,nothing)
cv2.createTrackbar('satLow', 'TrackBars',160,255,nothing)
cv2.createTrackbar('satHigh', 'TrackBars',255,255,nothing)
cv2.createTrackbar('valLow', 'TrackBars',150,255,nothing)
cv2.createTrackbar('valHigh', 'TrackBars',255,255,nothing)

# Two pan/tilt brackets on the PCA9685: channels 0/1 and channels 2/3
kit=ServoKit(channels=16)
tilt1=90
pan1=90
tilt2=90
pan2=90
kit.servo[0].angle=pan1
kit.servo[1].angle=tilt1
kit.servo[2].angle=pan2
kit.servo[3].angle=tilt2

width=720
height=480
flip=2
font=cv2.FONT_HERSHEY_SIMPLEX

# GStreamer pipelines for the two CSI cameras (sensor-id 0 and 1)
camSet1='nvarguscamerasrc sensor-id=0 ee-mode=1 ee-strength=0 tnr-mode=2 tnr-strength=1 wbmode=3 ! video/x-raw(memory:NVMM), width=3264, height=2464, framerate=21/1,format=NV12 ! nvvidconv flip-method='+str(flip)+' ! video/x-raw, width='+str(width)+', height='+str(height)+', format=BGRx ! videoconvert ! video/x-raw, format=BGR ! videobalance contrast=1.3 brightness=-.2 saturation=1.2 ! appsink drop=True'
camSet2='nvarguscamerasrc sensor-id=1 ee-mode=1 ee-strength=0 tnr-mode=2 tnr-strength=1 wbmode=3 ! video/x-raw(memory:NVMM), width=3264, height=2464, framerate=21/1,format=NV12 ! nvvidconv flip-method='+str(flip)+' ! video/x-raw, width='+str(width)+', height='+str(height)+', format=BGRx ! videoconvert ! video/x-raw, format=BGR ! videobalance contrast=1.3 brightness=-.2 saturation=1.2 ! appsink drop=True'
#camSet='nvarguscamerasrc sensor-id=0 ! video/x-raw(memory:NVMM), width=3264, height=2464, framerate=21/1,format=NV12 ! nvvidconv flip-method='+str(flip)+' ! video/x-raw, width='+str(width)+', height='+str(height)+', format=BGRx ! videoconvert ! video/x-raw, format=BGR ! appsink'
#camSet='v4l2src device=/dev/video1 ! video/x-raw,width='+str(width)+',height='+str(height)+',framerate=20/1 ! videoconvert ! appsink'
cam1=cv2.VideoCapture(camSet1)
cam2=cv2.VideoCapture(camSet2)

while True:
    _, frame1 = cam1.read()
    _, frame2 = cam2.read()
    hsv1=cv2.cvtColor(frame1,cv2.COLOR_BGR2HSV)
    hsv2=cv2.cvtColor(frame2,cv2.COLOR_BGR2HSV)

    # Read the current trackbar positions and build the HSV bounds
    hueLow=cv2.getTrackbarPos('hueLower', 'TrackBars')
    hueUp=cv2.getTrackbarPos('hueUpper', 'TrackBars')
    Ls=cv2.getTrackbarPos('satLow', 'TrackBars')
    Us=cv2.getTrackbarPos('satHigh', 'TrackBars')
    Lv=cv2.getTrackbarPos('valLow', 'TrackBars')
    Uv=cv2.getTrackbarPos('valHigh', 'TrackBars')
    l_b=np.array([hueLow,Ls,Lv])
    u_b=np.array([hueUp,Us,Uv])

    # Masks are white where the pixel falls inside the HSV range
    FGmask1=cv2.inRange(hsv1,l_b,u_b)
    FGmask2=cv2.inRange(hsv2,l_b,u_b)
    cv2.imshow('FGmask1',FGmask1)
    cv2.moveWindow('FGmask1',0,0)

    # Camera 1: track the largest contour and drive the servos on channels 2 and 3
    contours1,_ = cv2.findContours(FGmask1,cv2.RETR_EXTERNAL,cv2.CHAIN_APPROX_SIMPLE)
    contours1=sorted(contours1,key=lambda x:cv2.contourArea(x),reverse=True)
    for cnt in contours1:
        area=cv2.contourArea(cnt)
        (x,y,w,h)=cv2.boundingRect(cnt)
        if area>=100:
            cv2.rectangle(frame1,(x,y),(x+w,y+h),(0,255,255),3)
            objX=x+w/2
            objY=y+h/2
            # Proportional correction with a 15 pixel deadband
            errorPan1=objX-width/2
            errorTilt1=objY-height/2
            if abs(errorPan1)>15:
                pan1=pan1+errorPan1/40
            if abs(errorTilt1)>15:
                tilt1=tilt1-errorTilt1/40
            # Clamp the commanded angles to the servo range
            if pan1>180:
                pan1=180
                print('Pan Out of Range')
            if pan1<0:
                pan1=0
                print('Pan Out of Range')
            if tilt1>180:
                tilt1=180
                print('Tilt Out of Range')
            if tilt1<0:
                tilt1=0
            kit.servo[2].angle=pan1
            kit.servo[3].angle=tilt1
        break

    # Camera 2: same tracking, driving the servos on channels 0 and 1
    contours2,_ = cv2.findContours(FGmask2,cv2.RETR_EXTERNAL,cv2.CHAIN_APPROX_SIMPLE)
    contours2=sorted(contours2,key=lambda x:cv2.contourArea(x),reverse=True)
    for cnt in contours2:
        area=cv2.contourArea(cnt)
        (x,y,w,h)=cv2.boundingRect(cnt)
        if area>=100:
            cv2.rectangle(frame2,(x,y),(x+w,y+h),(0,255,255),3)
            objX=x+w/2
            objY=y+h/2
            errorPan2=objX-width/2
            errorTilt2=objY-height/2
            if abs(errorPan2)>15:
                pan2=pan2+errorPan2/40
            if abs(errorTilt2)>15:
                tilt2=tilt2-errorTilt2/40
            if pan2>180:
                pan2=180
                print('Pan Out of Range')
            if pan2<0:
                pan2=0
                print('Pan Out of Range')
            if tilt2>180:
                tilt2=180
                print('Tilt Out of Range')
            if tilt2<0:
                tilt2=0
            kit.servo[0].angle=pan2
            kit.servo[1].angle=tilt2
        break

    # Show both frames side by side with an exponentially filtered fps readout
    frame3=np.hstack((frame1,frame2))
    dt=time.time()-timeMark
    timeMark=time.time()
    dtFIL=.9*dtFIL + .1*dt
    fps=1/dtFIL
    cv2.rectangle(frame3,(0,0),(150,40),(0,0,255),-1)
    cv2.putText(frame3,'fps: '+str(round(fps,1)),(0,30),font,1,(0,255,255),2)
    #cv2.imshow('myCam1',frame1)
    #cv2.imshow('myCam2',frame2)
    cv2.imshow('comboCam',frame3)
    cv2.moveWindow('comboCam',0,450)
    if cv2.waitKey(1)==ord('q'):
        break

cam1.release()
cam2.release()
cv2.destroyAllWindows()
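The servo motion in the listing is a simple proportional controller with a deadband: each frame, the error between the object's center and the center of the image, divided by 40, is added to the pan angle (and subtracted from the tilt angle), and errors smaller than 15 pixels are ignored so the bracket does not hunt around the set point. The result is then clamped to the servo's 0 to 180 degree range. Pulled out on its own, the update rule looks roughly like the sketch below; update_angles and clamp are illustrative helper names, not part of the listing, but the gain and deadband mirror the code above.

def clamp(angle,lo=0,hi=180):
    # keep the commanded angle inside the servo's mechanical range
    return max(lo,min(hi,angle))

def update_angles(panAngle,tiltAngle,objX,objY,width=720,height=480):
    errorPan=objX-width/2        # positive when the object is right of center
    errorTilt=objY-height/2      # positive when the object is below center
    if abs(errorPan)>15:         # 15 pixel deadband to avoid jitter
        panAngle=clamp(panAngle+errorPan/40)
    if abs(errorTilt)>15:
        tiltAngle=clamp(tiltAngle-errorTilt/40)
    return panAngle,tiltAngle

Dividing the error by 40 keeps each correction small, so the bracket eases toward the target over several frames instead of overshooting; a larger divisor gives slower, smoother motion, while a smaller one gives faster but twitchier tracking.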