I have been working on an object detection and tracking system for a while now. When a person is detected, I try to light up an LED, working out which segment of the frame the bounding box falls into from its coordinates relative to the width of the resolution. As of now, the FPS is around 30 when I do not plug in the serial communication function. But when I plug the serial communication in, the FPS drops far too low, to around 7-10. What could be causing the problem here?
OS = Windows
GPU = GTX 1070
CPU = i7
Model = Darkflow, YOLOv2
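For clarity, what I mean by "relative to the width of the resolution" is that the 1280 px frame is split into three equal segments, and the center x of the detected box decides which segment (and therefore which LED command) is used. A minimal sketch of that mapping only (the function name here is just for illustration; the real thresholds live in box_tracking() below):

# Illustration only: how the bounding-box center x maps to the three segments.
def segment_for(center_x, frame_width=1280):
    third = frame_width // 3          # 426 px per segment
    if center_x <= third:
        return -1                     # LEFT
    elif center_x <= 2 * third:
        return 0                      # CENTER
    else:
        return 1                      # RIGHT

print(segment_for(500))               # -> 0 (CENTER)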
Object detection code:
import cv2
from darkflow.net.build import TFNet
import numpy as np
import time
from collections import namedtuple
import luggage_arduino

"""
Main system for running the whole script for object detection and tracking
"""

class NeuralNetwork:
    def __init__(self):
        """Define model configuration and weight"""
        options = {
            'model': 'cfg/yolov2.cfg',
            'load': 'bin/yolov2.weights',
            'threshold': 0.8,  # Sets the confidence level of detecting box, range from 0 to 1
            'gpu': 0.8  # If do not want to use gpu, set to 0
        }

        """Define OpenCV configuration"""
        tfnet = TFNet(options)
        colors = [tuple(255 * np.random.rand(3)) for _ in range(10)]  # Set colors for different boxes
        capture = cv2.VideoCapture(0, cv2.CAP_DSHOW)
        capture.set(cv2.CAP_PROP_FRAME_WIDTH, 1280)
        capture.set(cv2.CAP_PROP_FRAME_HEIGHT, 720)

        while True:  # Main loop for object detection and tracking
            stime = time.time()
            ret, frame = capture.read()
            box = cv2.rectangle(frame, (0, 0), (426, 720), (0, 0, 255), 2)  # Parameter of first segment (LEFT)
            box2 = cv2.rectangle(frame, (426, 0), (852, 720), (0, 0, 255), 2)  # Parameter of second segment (CENTER)
            box3 = cv2.rectangle(frame, (852, 0), (1280, 720), (0, 0, 255), 2)  # Parameter of third segment (RIGHT)
            if ret:
                results = tfnet.return_predict(frame)
                for color, result in zip(colors, results):
                    tl = (result['topleft']['x'], result['topleft']['y'])
                    br = (result['bottomright']['x'], result['bottomright']['y'])
                    label = result['label']
                    confidence = result['confidence']
                    text = '{}: {:.0f}%'.format(label, confidence * 100)
                    frame = cv2.rectangle(frame, tl, br, color, 5)
                    frame = cv2.putText(frame, text, tl, cv2.FONT_HERSHEY_COMPLEX, 1, (0, 0, 0), 2)
                    self.center_of_box(tl, br)  # Calls the function for coordinate calculation
                cv2.imshow('frame', frame)
                print('FPS {:.1f}'.format(1 / (time.time() - stime)))
            if cv2.waitKey(1) & 0xFF == ord('q'):
                break

        capture.release()
        cv2.destroyAllWindows()

    def center_of_box(self, tl, br):
        self.tl = tl
        self.br = br
        center_coord = namedtuple("center_coord", ['x', 'y'])  # List of calculated center coord for each FPS
        center_x = ((tl[0] + br[0]) / 2)
        center_y = ((tl[1] + br[1]) / 2)
        center_box = center_coord(center_x, center_y)  # Save center coord of detected box in list
        print(center_box)
        self.box_tracking(center_x)  # Call function for tracking the box coord

    def box_tracking(self, center_x):
        self.center_x = center_x
        while True:
            if 0 <= center_x <= 426:
                center = -1
            elif 426 < center_x <= 852:
                center = 0
            elif 852 < center_x <= 1280:
                center = 1
            else:
                center = 2
            break
        luggage_arduino.Arduino(center)  # Calls function for serial comm
Code for the pyserial comms:
import serial
import time

arduino = serial.Serial("com3", 9600)

def serial_comm():  # Pass the function
    pass

"""Main class for serial comm"""
class Arduino:
    def __init__(self, center):
        self.serial_comm(center)  # Calls function of serial comm

    def serial_comm(self, center):
        if center == -1:
            time.sleep(1)
            arduino.write(b'L')  # b can be replaced with str.encode("Your string here")
            serial_comm()
        elif center == 0:
            time.sleep(1)
            arduino.write(b'C')
            serial_comm()
        elif center == 1:
            time.sleep(1)
            arduino.write(b'R')
            serial_comm()
        else:
            center = 2
            time.sleep(1)
            arduino.write(b'N')
            serial_comm()
        time.sleep(2)
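To see how much of each frame the serial path accounts for, I could time one call into the Arduino class in isolation. Below is a minimal, self-contained sketch with a stand-in class that reproduces only the time.sleep() delays from serial_comm() above (FakeArduino is just a placeholder name, no real serial port is opened, and it assumes the trailing time.sleep(2) runs on every call), so the per-call cost can be compared against the frame budget:

import time

class FakeArduino:
    """Stand-in for luggage_arduino.Arduino: keeps only the delays, no serial port."""
    def __init__(self, center):
        time.sleep(1)   # delay before arduino.write() in serial_comm()
        time.sleep(2)   # trailing delay at the end of serial_comm()

start = time.time()
FakeArduino(0)          # one call, as the detection loop makes per detected box
elapsed = time.time() - start
print('one serial-comm call: {:.2f} s'.format(elapsed))
print('frame budget at 30 FPS: {:.1f} ms'.format(1000.0 / 30))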