# feature-tracker.py
# DepthAI stereo FeatureTracker demo: tracks and visualizes image features
# from both mono cameras, with a runtime-switchable motion estimator.
from collections import deque

import cv2
import depthai as dai
  4. class FeatureTrackerDrawer:
  5. lineColor = (200, 0, 200)
  6. pointColor = (0, 0, 255)
  7. circleRadius = 2
  8. maxTrackedFeaturesPathLength = 30
  9. # for how many frames the feature is tracked
  10. trackedFeaturesPathLength = 10
  11. trackedIDs = None
  12. trackedFeaturesPath = None
  13. def onTrackBar(self, val):
  14. FeatureTrackerDrawer.trackedFeaturesPathLength = val
  15. pass
  16. def trackFeaturePath(self, features):
  17. newTrackedIDs = set()
  18. for currentFeature in features:
  19. currentID = currentFeature.id
  20. newTrackedIDs.add(currentID)
  21. if currentID not in self.trackedFeaturesPath:
  22. self.trackedFeaturesPath[currentID] = deque()
  23. path = self.trackedFeaturesPath[currentID]
  24. path.append(currentFeature.position)
  25. while(len(path) > max(1, FeatureTrackerDrawer.trackedFeaturesPathLength)):
  26. path.popleft()
  27. self.trackedFeaturesPath[currentID] = path
  28. featuresToRemove = set()
  29. for oldId in self.trackedIDs:
  30. if oldId not in newTrackedIDs:
  31. featuresToRemove.add(oldId)
  32. for id in featuresToRemove:
  33. self.trackedFeaturesPath.pop(id)
  34. self.trackedIDs = newTrackedIDs
  35. def drawFeatures(self, img):
  36. cv2.setTrackbarPos(self.trackbarName, self.windowName, FeatureTrackerDrawer.trackedFeaturesPathLength)
  37. for featurePath in self.trackedFeaturesPath.values():
  38. path = featurePath
  39. for j in range(len(path) - 1):
  40. src = (int(path[j].x), int(path[j].y))
  41. dst = (int(path[j + 1].x), int(path[j + 1].y))
  42. cv2.line(img, src, dst, self.lineColor, 1, cv2.LINE_AA, 0)
  43. j = len(path) - 1
  44. cv2.circle(img, (int(path[j].x), int(path[j].y)), self.circleRadius, self.pointColor, -1, cv2.LINE_AA, 0)
  45. def __init__(self, trackbarName, windowName):
  46. self.trackbarName = trackbarName
  47. self.windowName = windowName
  48. cv2.namedWindow(windowName)
  49. cv2.createTrackbar(trackbarName, windowName, FeatureTrackerDrawer.trackedFeaturesPathLength, FeatureTrackerDrawer.maxTrackedFeaturesPathLength, self.onTrackBar)
  50. self.trackedIDs = set()
  51. self.trackedFeaturesPath = dict()
# Create pipeline
pipeline = dai.Pipeline()

# Define sources and outputs: one mono camera + FeatureTracker per side,
# XLinkOut nodes streaming the passthrough frames and tracked features to
# the host, and a single XLinkIn feeding config updates to both trackers.
monoLeft = pipeline.create(dai.node.MonoCamera)
monoRight = pipeline.create(dai.node.MonoCamera)
featureTrackerLeft = pipeline.create(dai.node.FeatureTracker)
featureTrackerRight = pipeline.create(dai.node.FeatureTracker)

xoutPassthroughFrameLeft = pipeline.create(dai.node.XLinkOut)
xoutTrackedFeaturesLeft = pipeline.create(dai.node.XLinkOut)
xoutPassthroughFrameRight = pipeline.create(dai.node.XLinkOut)
xoutTrackedFeaturesRight = pipeline.create(dai.node.XLinkOut)
xinTrackedFeaturesConfig = pipeline.create(dai.node.XLinkIn)

# Stream names must match the queue names opened on the device below.
xoutPassthroughFrameLeft.setStreamName("passthroughFrameLeft")
xoutTrackedFeaturesLeft.setStreamName("trackedFeaturesLeft")
xoutPassthroughFrameRight.setStreamName("passthroughFrameRight")
xoutTrackedFeaturesRight.setStreamName("trackedFeaturesRight")
xinTrackedFeaturesConfig.setStreamName("trackedFeaturesConfig")

# Properties
monoLeft.setResolution(dai.MonoCameraProperties.SensorResolution.THE_720_P)
monoLeft.setBoardSocket(dai.CameraBoardSocket.LEFT)
monoRight.setResolution(dai.MonoCameraProperties.SensorResolution.THE_720_P)
monoRight.setBoardSocket(dai.CameraBoardSocket.RIGHT)

# Linking: camera -> tracker, tracker outputs -> host, host config -> tracker.
monoLeft.out.link(featureTrackerLeft.inputImage)
featureTrackerLeft.passthroughInputImage.link(xoutPassthroughFrameLeft.input)
featureTrackerLeft.outputFeatures.link(xoutTrackedFeaturesLeft.input)
xinTrackedFeaturesConfig.out.link(featureTrackerLeft.inputConfig)

monoRight.out.link(featureTrackerRight.inputImage)
featureTrackerRight.passthroughInputImage.link(xoutPassthroughFrameRight.input)
featureTrackerRight.outputFeatures.link(xoutTrackedFeaturesRight.input)
xinTrackedFeaturesConfig.out.link(featureTrackerRight.inputConfig)

# By default the least amount of resources are allocated;
# increasing it improves performance.
numShaves = 2
numMemorySlices = 2
featureTrackerLeft.setHardwareResources(numShaves, numMemorySlices)
featureTrackerRight.setHardwareResources(numShaves, numMemorySlices)

# Host-side copy of the tracker configuration, toggled by the 's' key in the
# main loop. NOTE(review): only the right tracker's initial config is read —
# presumably both trackers start identical, so it stands for both; confirm.
featureTrackerConfig = featureTrackerRight.initialConfig.get()

print("Press 's' to switch between Lucas-Kanade optical flow and hardware accelerated motion estimation!")
# Connect to device and start pipeline
with dai.Device(pipeline) as device:

    # Output queues used to receive the results (queue depth 8, non-blocking).
    passthroughImageLeftQueue = device.getOutputQueue("passthroughFrameLeft", 8, False)
    outputFeaturesLeftQueue = device.getOutputQueue("trackedFeaturesLeft", 8, False)
    passthroughImageRightQueue = device.getOutputQueue("passthroughFrameRight", 8, False)
    outputFeaturesRightQueue = device.getOutputQueue("trackedFeaturesRight", 8, False)
    inputFeatureTrackerConfigQueue = device.getInputQueue("trackedFeaturesConfig")

    # One drawer (window + trackbar) per camera side.
    leftWindowName = "left"
    leftFeatureDrawer = FeatureTrackerDrawer("Feature tracking duration (frames)", leftWindowName)

    rightWindowName = "right"
    rightFeatureDrawer = FeatureTrackerDrawer("Feature tracking duration (frames)", rightWindowName)

    while True:
        # Mono frames arrive single-channel; convert to BGR so the colored
        # feature trails can be drawn on top.
        inPassthroughFrameLeft = passthroughImageLeftQueue.get()
        passthroughFrameLeft = inPassthroughFrameLeft.getFrame()
        leftFrame = cv2.cvtColor(passthroughFrameLeft, cv2.COLOR_GRAY2BGR)

        inPassthroughFrameRight = passthroughImageRightQueue.get()
        passthroughFrameRight = inPassthroughFrameRight.getFrame()
        rightFrame = cv2.cvtColor(passthroughFrameRight, cv2.COLOR_GRAY2BGR)

        # Update the trails with this frame's features, then render them.
        trackedFeaturesLeft = outputFeaturesLeftQueue.get().trackedFeatures
        leftFeatureDrawer.trackFeaturePath(trackedFeaturesLeft)
        leftFeatureDrawer.drawFeatures(leftFrame)

        trackedFeaturesRight = outputFeaturesRightQueue.get().trackedFeatures
        rightFeatureDrawer.trackFeaturePath(trackedFeaturesRight)
        rightFeatureDrawer.drawFeatures(rightFrame)

        # Show the frame
        cv2.imshow(leftWindowName, leftFrame)
        cv2.imshow(rightWindowName, rightFrame)

        key = cv2.waitKey(1)
        if key == 27:  # Esc quits
            break
        elif key == ord('s'):
            # Toggle the motion estimator and push the updated config to
            # both trackers through the shared XLinkIn stream.
            if featureTrackerConfig.motionEstimator.type == dai.FeatureTrackerConfig.MotionEstimator.Type.LUCAS_KANADE_OPTICAL_FLOW:
                featureTrackerConfig.motionEstimator.type = dai.FeatureTrackerConfig.MotionEstimator.Type.HW_MOTION_ESTIMATION
                print("Switching to hardware accelerated motion estimation")
            else:
                featureTrackerConfig.motionEstimator.type = dai.FeatureTrackerConfig.MotionEstimator.Type.LUCAS_KANADE_OPTICAL_FLOW
                print("Switching to Lucas-Kanade optical flow")

            cfg = dai.FeatureTrackerConfig()
            cfg.set(featureTrackerConfig)
            inputFeatureTrackerConfigQueue.send(cfg)