# facetracker.py
  1. #! /usr/bin/python3
  2. from picamera.array import PiRGBArray
  3. from picamera import PiCamera
  4. from datetime import datetime
  5. import time
  6. import cv2
  7. import sys
  8. import imutils
  9. import np
  10. import math # for sqrt distance formula
  11. # i2c stuff
  12. import smbus
  13. bus = smbus.SMBus(1)
  14. SLAVE_ADDRESS = 0x04
  15. everFound = False # have we ever found a face?
  16. lastmovement = None # set to 'right' or left'
  17. # Create the haar cascade
  18. frontalfaceCascade = cv2.CascadeClassifier("haarcascade_frontalface_default.xml")
  19. profilefaceCascade = cv2.CascadeClassifier("haarcascade_profileface.xml")
  20. face = [0,0,0,0] # This will hold the array that OpenCV returns when it finds a face: (makes a rectangle)
  21. center = [0,0] # Center of the face: a point calculated from the above variable
  22. lastface = 0 # int 1-3 used to speed up detection. The script is looking for a right profile face,-
  23. # a left profile face, or a frontal face; rather than searching for all three every time,-
  24. # it uses this variable to remember which is last saw: and looks for that again. If it-
  25. # doesn't find it, it's set back to zero and on the next loop it will search for all three.-
  26. # This basically tripples the detect time so long as the face hasn't moved much.
  27. scanleft = True # Should we scan for left profiles?
  28. scanright = True # should we scan for right profiles?
  29. # initialize the camera and grab a reference to the raw camera capture
  30. camera = PiCamera()
  31. #camera.resolution = (160, 120)
  32. #camera.resolution = (640,480)
  33. camera.resolution = (1024,768)
  34. cameracenter = (camera.resolution[0]/2, camera.resolution[1]/2)
  35. camera.framerate = 32
  36. rawCapture = PiRGBArray(camera, camera.resolution)
  37. # Points to the last place we sawa a face
  38. target = ( camera.resolution[0]/2, camera.resolution[1]/2 )
  39. # Fisheye corrections. See https://medium.com/@kennethjiang/calibrate-fisheye-lens-using-opencv-333b05afa0b0
  40. # 640x480:
  41. #correct_fisheye = False
  42. #DIM=(640, 480)
  43. #K=np.array([[363.787052141742, 0.0, 332.09761373599576], [0.0, 362.23769923959975, 238.35982850966641], [0.0, 0.0, 1.0]])
  44. #D=np.array([[-0.019982864934848042], [-0.10107557279423625], [0.20401597940960342], [-0.1406464201639892]])
  45. # 1024x768:
  46. correct_fisheye = True
  47. DIM=(1024, 768)
  48. K=np.array([[583.6639649321671, 0.0, 518.0139106134624], [0.0, 580.8039721094127, 384.32095600935503], [0.0, 0.0, 1.0]])
  49. D=np.array([[0.0028045742945672475], [-0.14423839478882694], [0.23715105072799644], [-0.1400677375634837]])
  50. def distance(p0, p1):
  51. return math.sqrt((p0[0] - p1[0])**2 + (p0[1] - p1[1])**2)
  52. def search_rightprofile(i):
  53. # return profilefaceCascade.detectMultiScale(i,1.3,4,(cv2.CV_HAAR_DO_CANNY_PRUNING + cv2.CV_HAAR_FIND_BIGGEST_OBJECT + cv2.CV_HAAR_DO_ROUGH_SEARCH),(30,30))
  54. if scanright:
  55. return profilefaceCascade.detectMultiScale(i)
  56. else:
  57. return ()
  58. def search_leftprofile(i):
  59. if scanleft:
  60. revimage = cv2.flip(i, 1) # Flip the image
  61. # return profilefaceCascade.detectMultiScale(i,1.3,4,(cv2.CV_HAAR_DO_CANNY_PRUNING + cv2.CV_HAAR_FIND_BIGGEST_OBJECT + cv2.CV_HAAR_DO_ROUGH_SEARCH),(30,30))
  62. return profilefaceCascade.detectMultiScale(i)
  63. else:
  64. return ()
  65. def search_frontface(i):
  66. # return frontalfaceCascade.detectMultiScale(i,1.3,4,(cv2.CV_HAAR_DO_CANNY_PRUNING + cv2.CV_HAAR_FIND_BIGGEST_OBJECT + cv2.CV_HAAR_DO_ROUGH_SEARCH),(30,30))
  67. return frontalfaceCascade.detectMultiScale(i)
  68. def undistort(i, balance=0.0, dim2=None, dim3=None):
  69. # Sanity Check the source dimensions
  70. dim1 = i.shape[:2][::-1] #dim1 is the dimension of input image to un-distort
  71. assert dim1[0]/dim1[1] == DIM[0]/DIM[1], "Image to undistort needs to have same aspect ratio as the ones used in calibration"
  72. if not dim2:
  73. dim2 = dim1
  74. if not dim3:
  75. dim3 = dim1
  76. scaled_K = K * dim1[0] / DIM[0] # The values of K is to scale with image dimension.
  77. scaled_K[2][2] = 1.0 # Except that K[2][2] is always 1.0
  78. # This is how scaled_K, dim2 and balance are used to determine the final K used to un-distort image. OpenCV document failed to make this clear!
  79. new_K = cv2.fisheye.estimateNewCameraMatrixForUndistortRectify(scaled_K, D, dim2, np.eye(3), balance=balance)
  80. map1, map2 = cv2.fisheye.initUndistortRectifyMap(scaled_K, D, np.eye(3), new_K, dim3, cv2.CV_16SC2)
  81. return cv2.remap(i, map1, map2, interpolation=cv2.INTER_LINEAR, borderMode=cv2.BORDER_CONSTANT)
  82. # allow the camera to warmup
  83. time.sleep(0.1)
  84. lastTime = time.time()*1000.0
  85. # capture frames from the camera
  86. for frame in camera.capture_continuous(rawCapture, format="bgr", use_video_port=True):
  87. # grab the raw NumPy array representing the image, then initialize the timestamp
  88. # and occupied/unoccupied text
  89. image = frame.array
  90. image = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY) # convert to greyscale
  91. if correct_fisheye:
  92. image = undistort(image, 0.8)
  93. faces = ();
  94. faceFound = False # This variable is set to true if, on THIS loop a face has already been found
  95. # We search for a face three diffrent ways, and if we have found one already-
  96. # there is no reason to keep looking.
  97. # First Scan
  98. if lastface == 1:
  99. faces = search_rightprofile(image)
  100. if faces != ():
  101. faceFound=True
  102. lastface = 1
  103. elif lastface == 2:
  104. faces = search_leftprofile(image)
  105. if faces != ():
  106. faceFound=True
  107. lastface = 2
  108. else:
  109. faces = search_frontface(image)
  110. if faces != ():
  111. lastface = 3
  112. faceFound=True
  113. # Second scan
  114. if not faceFound:
  115. if lastface == 1:
  116. faces = search_frontface(image)
  117. if faces != ():
  118. lastface = 3
  119. faceFound=True
  120. elif lastface == 2:
  121. faces = search_rightprofile(image)
  122. if faces != ():
  123. faceFound=True
  124. lastface = 1
  125. else:
  126. faces = search_leftprofile(image)
  127. if faces != ():
  128. faceFound=True
  129. lastface = 2
  130. # Third scan
  131. if not faceFound:
  132. if lastface == 1:
  133. faces = search_leftprofile(image)
  134. if faces != ():
  135. faceFound=True
  136. lastface = 2
  137. elif lastface == 2:
  138. faces = search_frontface(image)
  139. if faces != ():
  140. lastface = 3
  141. faceFound=True
  142. else:
  143. faces = search_rightprofile(image)
  144. if faces != ():
  145. faceFound=True
  146. lastface = 1
  147. if faceFound:
  148. everFound = True
  149. print("{}: {} faces found. Type: {}".format(time.time()*1000.0-lastTime, len(faces), lastface))
  150. # Draw a rectangle around the faces
  151. for (x, y, w, h) in faces:
  152. cv2.circle(image, (int(x+w/2), int(y+h/2)), int((w+h)/3), (255, 255, 255), 1)
  153. # Temporary, save the image
  154. cv2.imwrite("tmp/img.{}.facetype{}.png".format(datetime.now().strftime("%Y%m%d.%H%M%S.%f"), lastface), image)
  155. # Find the centermost face
  156. curdistance = 1000000 # Outside the dimensions of the picture
  157. for f in faces:
  158. x,y,w,h = f
  159. tmpcenter = [(w/2+x),(h/2+y)] # we are given an x,y corner point and a width and height, we need the center
  160. tmpdistance = distance(tmpcenter, cameracenter)
  161. if(tmpdistance < curdistance):
  162. print("Face closer to center detected. New target location: ({}, {}) - distance: {}".format(tmpcenter[0],tmpcenter[1],tmpdistance))
  163. center = tmpcenter;
  164. target = center
  165. else: # No face found
  166. print("{}: no faces found. Continuing with existing target ({}, {})".format(time.time()*1000.0-lastTime, target[0], target[1]))
  167. # clear the stream in preparation for the next frame
  168. rawCapture.truncate(0)
  169. lastTime = time.time()*1000.0
  170. # Determine directions and distance
  171. travel = [ (cameracenter[0] - target[0]) / camera.resolution[0], (cameracenter[1] - target[1]) /camera.resolution[1] ]
  172. print("To move horizontal: {}, vertical: {}".format(travel[0], travel[1]))
  173. # horizontal movement
  174. if abs(travel[0]) < 10.0 or everFound == False:
  175. # Fire!
  176. everFound = False # No face found since last detection
  177. try:
  178. bus.write_byte(SLAVE_ADDRESS, ord('F'))
  179. print("Sent '{}' to arduino.".format(ord(var('F'))))
  180. except:
  181. print("Bus I/O error. Continuing.")
  182. continue
  183. if travel[0] > 0 and lastmovement != "right":
  184. # Move right
  185. lastmovement = "right"
  186. try:
  187. bus.write_byte(SLAVE_ADDRESS, ord(' '))
  188. bus.write_byte(SLAVE_ADDRESS, ord('d'))
  189. print("Sent '{}' to arduino.".format(ord(var('d'))))
  190. except:
  191. print("Bus I/O error. Continuing.")
  192. continue
  193. if travel[0] < 0 and lastmovement != "left":
  194. # Move left
  195. lastmovement = "left"
  196. try:
  197. bus.write_byte(SLAVE_ADDRESS, ord(' '))
  198. bus.write_byte(SLAVE_ADDRESS, ord('a'))
  199. print("Sent '{}' to arduino.".format(ord(var('a'))))
  200. except:
  201. print("Bus I/O error. Continuing.")
  202. continue