  1. #! /usr/bin/python3
  2. from picamera.array import PiRGBArray
  3. from picamera import PiCamera
  4. from datetime import datetime
  5. import time
  6. import cv2
  7. import sys
  8. import imutils
  9. import np
  10. import math # for sqrt distance formula
  11. # i2c stuff
  12. import smbus
  13. bus = smbus.SMBus(1)
  14. SLAVE_ADDRESS = 0x04
# Create the haar cascade classifiers (XML files expected in the working directory).
#frontalfaceCascade = cv2.CascadeClassifier("haarcascade_frontalface_default.xml")
frontalfaceCascade = cv2.CascadeClassifier("frontalface_fromweb.xml")
profilefaceCascade = cv2.CascadeClassifier("haarcascade_profileface.xml")
face = [0,0,0,0] # This will hold the array that OpenCV returns when it finds a face: (makes a rectangle)
center = [0,0] # Center of the face: a point calculated from the above variable
lastface = 0 # int 1-3 used to speed up detection. The script is looking for a right profile face,-
# a left profile face, or a frontal face; rather than searching for all three every time,-
# it uses this variable to remember which it last saw: and looks for that again. If it-
# doesn't find it, it's set back to zero and on the next loop it will search for all three.-
# This basically tripples the detect time so long as the face hasn't moved much.
scanleft = True # Should we scan for left profiles?
scanright = True # should we scan for right profiles?
# initialize the camera and grab a reference to the raw camera capture
camera = PiCamera()
#camera.resolution = (160, 120)
#camera.resolution = (640,480)
camera.resolution = (1024,768)
cameracenter = (camera.resolution[0]/2, camera.resolution[1]/2)
camera.framerate = 32
rawCapture = PiRGBArray(camera, camera.resolution)
# Points to the last place we saw a face
target = ( camera.resolution[0]/2, camera.resolution[1]/2 )
# Fisheye corrections. See https://medium.com/@kennethjiang/calibrate-fisheye-lens-using-opencv-333b05afa0b0
# K is the camera matrix, D the distortion coefficients, DIM the calibration image size.
# 640x480:
#correct_fisheye = False
#DIM=(640, 480)
#K=np.array([[363.787052141742, 0.0, 332.09761373599576], [0.0, 362.23769923959975, 238.35982850966641], [0.0, 0.0, 1.0]])
#D=np.array([[-0.019982864934848042], [-0.10107557279423625], [0.20401597940960342], [-0.1406464201639892]])
# 1024x768:
correct_fisheye = True
DIM=(1024, 768)
K=np.array([[583.6639649321671, 0.0, 518.0139106134624], [0.0, 580.8039721094127, 384.32095600935503], [0.0, 0.0, 1.0]])
D=np.array([[0.0028045742945672475], [-0.14423839478882694], [0.23715105072799644], [-0.1400677375634837]])
# Superseded by distance_to_closest() below:
#def distance(p0, p1):
# return math.sqrt((p0[0] - p1[0])**2 + (p0[1] - p1[1])**2)
  51. def search_rightprofile(i):
  52. # return profilefaceCascade.detectMultiScale(i,1.3,4,(cv2.CV_HAAR_DO_CANNY_PRUNING + cv2.CV_HAAR_FIND_BIGGEST_OBJECT + cv2.CV_HAAR_DO_ROUGH_SEARCH),(30,30))
  53. if scanright:
  54. return profilefaceCascade.detectMultiScale(i, maxSize=(800,800))
  55. else:
  56. return ()
  57. def search_leftprofile(i):
  58. if scanleft:
  59. revimage = cv2.flip(i, 1) # Flip the image
  60. # return profilefaceCascade.detectMultiScale(i,1.3,4,(cv2.CV_HAAR_DO_CANNY_PRUNING + cv2.CV_HAAR_FIND_BIGGEST_OBJECT + cv2.CV_HAAR_DO_ROUGH_SEARCH),(30,30))
  61. return profilefaceCascade.detectMultiScale(i, maxSize=(800,800))
  62. else:
  63. return ()
  64. def search_frontface(i):
  65. # return frontalfaceCascade.detectMultiScale(i,1.3,4,(cv2.CV_HAAR_DO_CANNY_PRUNING + cv2.CV_HAAR_FIND_BIGGEST_OBJECT + cv2.CV_HAAR_DO_ROUGH_SEARCH),(30,30))
  66. return frontalfaceCascade.detectMultiScale(i, maxSize=(800,800))
def undistort(i, balance=0.0, dim2=None, dim3=None):
    """Remove fisheye distortion from image `i` using the module-level
    calibration constants K, D and DIM.

    balance: 0.0 crops to only valid pixels; 1.0 keeps the full warped frame.
    dim2/dim3: optional intermediate/output dimensions (default: input size).
    Returns the remapped (undistorted) image.
    Based on https://medium.com/@kennethjiang/calibrate-fisheye-lens-using-opencv-333b05afa0b0
    """
    # Sanity Check the source dimensions
    dim1 = i.shape[:2][::-1] #dim1 is the dimension of input image to un-distort
    assert dim1[0]/dim1[1] == DIM[0]/DIM[1], "Image to undistort needs to have same aspect ratio as the ones used in calibration"
    if not dim2:
        dim2 = dim1
    if not dim3:
        dim3 = dim1
    scaled_K = K * dim1[0] / DIM[0] # The values of K is to scale with image dimension.
    scaled_K[2][2] = 1.0 # Except that K[2][2] is always 1.0
    # This is how scaled_K, dim2 and balance are used to determine the final K used to un-distort image. OpenCV document failed to make this clear!
    new_K = cv2.fisheye.estimateNewCameraMatrixForUndistortRectify(scaled_K, D, dim2, np.eye(3), balance=balance)
    map1, map2 = cv2.fisheye.initUndistortRectifyMap(scaled_K, D, np.eye(3), new_K, dim3, cv2.CV_16SC2)
    return cv2.remap(i, map1, map2, interpolation=cv2.INTER_LINEAR, borderMode=cv2.BORDER_CONSTANT)
  81. def findface(image):
  82. global lastface
  83. global correct_fisheye
  84. faces = ();
  85. # TODO: There is a better way to do this. Find it.
  86. # First Scan
  87. if lastface == 1:
  88. faces = search_rightprofile(image)
  89. if faces != ():
  90. lastface = 1
  91. return faces
  92. elif lastface == 2:
  93. faces = search_leftprofile(image)
  94. if faces != ():
  95. lastface = 2
  96. return faces
  97. else:
  98. faces = search_frontface(image)
  99. if faces != ():
  100. faceFound=True
  101. return faces
  102. # Second scan
  103. if lastface == 1:
  104. faces = search_frontface(image)
  105. if faces != ():
  106. lastface = 3
  107. return faces
  108. elif lastface == 2:
  109. faces = search_rightprofile(image)
  110. if faces != ():
  111. lastface = 1
  112. return faces
  113. else:
  114. faces = search_leftprofile(image)
  115. if faces != ():
  116. lastface = 2
  117. return faces
  118. # Third scan
  119. if lastface == 1:
  120. faces = search_leftprofile(image)
  121. if faces != ():
  122. lastface = 2
  123. return faces
  124. elif lastface == 2:
  125. faces = search_frontface(image)
  126. if faces != ():
  127. lastface = 3
  128. return faces
  129. else:
  130. faces = search_rightprofile(image)
  131. if faces != ():
  132. lastface = 1
  133. return faces
  134. return ()
  135. def circlefaces(image, faces):
  136. global lastface
  137. for (x, y, w, h) in faces:
  138. cv2.circle(image, (int(x+w/2), int(y+h/2)), int((w+h)/3), (0, 255, 0), 1)
  139. # Temporary, save the image
  140. cv2.imwrite("tmp/img.{}.facetype{}.png".format(datetime.now().strftime("%Y%m%d.%H%M%S.%f"), lastface), image)
  141. def distance_to_closest(faces):
  142. # Negative values will be left
  143. closestdistance = None
  144. for f in faces:
  145. x,y,w,h = f
  146. centerpoint = (w/2)+x
  147. distance = centerpoint - cameracenter[0]
  148. if(closestdistance == None or abs(distance) < closestdistance):
  149. print("Face closer to center detected. New target location: {} (ctr: {}) - distance: {}".format(centerpoint, cameracenter[0], distance))
  150. closestdistance = distance
  151. return closestdistance
  152. def stop():
  153. print("Would stop.")
  154. return
  155. def left():
  156. print('Would go left.')
  157. return
  158. def right():
  159. print('Would go left.')
  160. return
  161. def fire():
  162. print("Would fire.")
  163. if __name__ == "__main__":
  164. # allow the camera to warmup
  165. time.sleep(0.1)
  166. lastTime = time.time()*1000.0
  167. lastAction = None
  168. # capture frames from the camera
  169. for frame in camera.capture_continuous(rawCapture, format="bgr", use_video_port=True):
  170. # grab the raw NumPy array representing the image, then initialize the timestamp
  171. # and occupied/unoccupied text
  172. print('Time: {}'.format(time.time()*1000.0 - lastTime))
  173. lastTime = time.time()*1000.0
  174. image = frame.array
  175. image = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY) # convert to greyscale
  176. if correct_fisheye:
  177. image = undistort(image, 0.8)
  178. faces = findface(image)
  179. if faces == ():
  180. print("No face found.")
  181. stop()
  182. lastAction = "Stop"
  183. rawCapture.truncate(0)
  184. continue
  185. circlefaces(image, faces)
  186. distance = distance_to_closest(faces)
  187. if abs(distance) < 10 and lastAction != "Fire":
  188. if lastAction == "Stop":
  189. lastAction = "Fire"
  190. fire()
  191. else:
  192. lastAction = "Stop"
  193. stop()
  194. elif distance < 0 and lastAction != "Left":
  195. lastAction = "Left"
  196. left()
  197. elif distance > 0 and lastAction != "Right":
  198. lastAction = "Right"
  199. right()
  200. else:
  201. print("Face found but no action taken. Distance = {}; Last Action = {}".format(distance, lastAction))
  202. # clear the stream in preparation for the next frame
  203. rawCapture.truncate(0)
  204. # horizontal movement
  205. # if abs(travel[0]) < 0.01 and everFound == True:
  206. # # Fire!
  207. # everFound = False # No face found since last firing
  208. # try:
  209. # bus.write_byte(SLAVE_ADDRESS, ord(' '))
  210. # time.sleep(1)
  211. # bus.write_byte(SLAVE_ADDRESS, ord('F'))
  212. # bus.write_i2c_block_data(SLAVE_ADDRESS, ord(' '), [ord('F')])
  213. # print("Sent '{}' to arduino.".format(ord('F')))
  214. # except:
  215. # print("Bus I/O error: {}".format(str(e)))
  216. # continue
  217. #
  218. # if travel[0] < 0 and lastmovement != "right":
  219. # # Move right
  220. # lastmovement = "right"
  221. # print("Moving right.")
  222. # try:
  223. # bus.write_byte(SLAVE_ADDRESS, ord(' '))
  224. # time.sleep(1)
  225. # bus.write_byte(SLAVE_ADDRESS, ord('d'))
  226. # time.sleep(1)
  227. # bus.write_byte(SLAVE_ADDRESS, ord('d'))
  228. # time.sleep(1)
  229. # bus.write_byte(SLAVE_ADDRESS, ord('d'))
  230. # time.sleep(1)
  231. # bus.write_byte(SLAVE_ADDRESS, ord('d'))
  232. # print("Sent '{}' to arduino.".format(ord('d')))
  233. # except Exception as e:
  234. # print("Bus I/O error: {}".format(str(e)))
  235. # continue
  236. #
  237. # if travel[0] > 0 and lastmovement != "left":
  238. # # Move left
  239. # lastmovement = "left"
  240. # print("Moving left.")
  241. # try:
  242. # bus.write_byte(SLAVE_ADDRESS, ord(' '))
  243. # time.sleep(1)
  244. # bus.write_byte(SLAVE_ADDRESS, ord('a'))
  245. # time.sleep(1)
  246. # bus.write_byte(SLAVE_ADDRESS, ord('a'))
  247. # time.sleep(1)
  248. # bus.write_byte(SLAVE_ADDRESS, ord('a'))
  249. # time.sleep(1)
  250. # bus.write_byte(SLAVE_ADDRESS, ord('a'))
  251. # time.sleep(1)
  252. # print("Sent '{}' to arduino.".format(ord('a')))
  253. # except:
  254. # print("Bus I/O error: {}".format(str(e)))
  255. # continue