facetracker.py 11 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327
  1. #! /usr/bin/python3
  2. from picamera.array import PiRGBArray
  3. from picamera import PiCamera
  4. from datetime import datetime
  5. import time
  6. import cv2
  7. import sys
  8. import imutils
  9. import np
  10. import math # for sqrt distance formula
  11. # i2c stuff
  12. import smbus
  13. bus = smbus.SMBus(1)
  14. SLAVE_ADDRESS = 0x04
  15. # Create the haar cascade
  16. #frontalfaceCascade = cv2.CascadeClassifier("haarcascade_frontalface_default.xml")
  17. frontalfaceCascade = cv2.CascadeClassifier("frontalface_fromweb.xml")
  18. profilefaceCascade = cv2.CascadeClassifier("haarcascade_profileface.xml")
  19. face = [0,0,0,0] # This will hold the array that OpenCV returns when it finds a face: (makes a rectangle)
  20. center = [0,0] # Center of the face: a point calculated from the above variable
  21. lastface = 0 # int 1-3 used to speed up detection. The script is looking for a right profile face,-
  22. # a left profile face, or a frontal face; rather than searching for all three every time,-
  23. # it uses this variable to remember which is last saw: and looks for that again. If it-
  24. # doesn't find it, it's set back to zero and on the next loop it will search for all three.-
  25. # This basically tripples the detect time so long as the face hasn't moved much.
  26. scanleft = True # Should we scan for left profiles?
  27. scanright = True # should we scan for right profiles?
  28. # initialize the camera and grab a reference to the raw camera capture
  29. camera = PiCamera()
  30. #camera.resolution = (160, 120)
  31. #camera.resolution = (640,480)
  32. camera.resolution = (1024,768)
  33. cameracenter = (camera.resolution[0]/2, camera.resolution[1]/2)
  34. camera.framerate = 32
  35. rawCapture = PiRGBArray(camera, camera.resolution)
  36. # Points to the last place we sawa a face
  37. target = ( camera.resolution[0]/2, camera.resolution[1]/2 )
  38. # Fisheye corrections. See https://medium.com/@kennethjiang/calibrate-fisheye-lens-using-opencv-333b05afa0b0
  39. # 640x480:
  40. #correct_fisheye = False
  41. #DIM=(640, 480)
  42. #K=np.array([[363.787052141742, 0.0, 332.09761373599576], [0.0, 362.23769923959975, 238.35982850966641], [0.0, 0.0, 1.0]])
  43. #D=np.array([[-0.019982864934848042], [-0.10107557279423625], [0.20401597940960342], [-0.1406464201639892]])
  44. # 1024x768:
  45. correct_fisheye = True
  46. DIM=(1024, 768)
  47. K=np.array([[583.6639649321671, 0.0, 518.0139106134624], [0.0, 580.8039721094127, 384.32095600935503], [0.0, 0.0, 1.0]])
  48. D=np.array([[0.0028045742945672475], [-0.14423839478882694], [0.23715105072799644], [-0.1400677375634837]])
  49. #def distance(p0, p1):
  50. # return math.sqrt((p0[0] - p1[0])**2 + (p0[1] - p1[1])**2)
  51. def search_rightprofile(i):
  52. # return profilefaceCascade.detectMultiScale(i,1.3,4,(cv2.CV_HAAR_DO_CANNY_PRUNING + cv2.CV_HAAR_FIND_BIGGEST_OBJECT + cv2.CV_HAAR_DO_ROUGH_SEARCH),(30,30))
  53. if scanright:
  54. return profilefaceCascade.detectMultiScale(i, maxSize=(30,30))
  55. else:
  56. return ()
  57. def search_leftprofile(i):
  58. if scanleft:
  59. revimage = cv2.flip(i, 1) # Flip the image
  60. # return profilefaceCascade.detectMultiScale(i,1.3,4,(cv2.CV_HAAR_DO_CANNY_PRUNING + cv2.CV_HAAR_FIND_BIGGEST_OBJECT + cv2.CV_HAAR_DO_ROUGH_SEARCH),(30,30))
  61. return profilefaceCascade.detectMultiScale(i, maxSize=(30,30))
  62. else:
  63. return ()
  64. def search_frontface(i):
  65. # return frontalfaceCascade.detectMultiScale(i,1.3,4,(cv2.CV_HAAR_DO_CANNY_PRUNING + cv2.CV_HAAR_FIND_BIGGEST_OBJECT + cv2.CV_HAAR_DO_ROUGH_SEARCH),(30,30))
  66. return frontalfaceCascade.detectMultiScale(i, maxSize=(30,30))
  67. def undistort(i, balance=0.0, dim2=None, dim3=None):
  68. # Sanity Check the source dimensions
  69. dim1 = i.shape[:2][::-1] #dim1 is the dimension of input image to un-distort
  70. assert dim1[0]/dim1[1] == DIM[0]/DIM[1], "Image to undistort needs to have same aspect ratio as the ones used in calibration"
  71. if not dim2:
  72. dim2 = dim1
  73. if not dim3:
  74. dim3 = dim1
  75. scaled_K = K * dim1[0] / DIM[0] # The values of K is to scale with image dimension.
  76. scaled_K[2][2] = 1.0 # Except that K[2][2] is always 1.0
  77. # This is how scaled_K, dim2 and balance are used to determine the final K used to un-distort image. OpenCV document failed to make this clear!
  78. new_K = cv2.fisheye.estimateNewCameraMatrixForUndistortRectify(scaled_K, D, dim2, np.eye(3), balance=balance)
  79. map1, map2 = cv2.fisheye.initUndistortRectifyMap(scaled_K, D, np.eye(3), new_K, dim3, cv2.CV_16SC2)
  80. return cv2.remap(i, map1, map2, interpolation=cv2.INTER_LINEAR, borderMode=cv2.BORDER_CONSTANT)
  81. def findface(image):
  82. global lastface
  83. global correct_fisheye
  84. faces = ();
  85. # TODO: There is a better way to do this. Find it.
  86. # First Scan
  87. if lastface == 1:
  88. faces = search_rightprofile(image)
  89. if faces != ():
  90. lastface = 1
  91. return faces
  92. elif lastface == 2:
  93. faces = search_leftprofile(image)
  94. if faces != ():
  95. lastface = 2
  96. return faces
  97. else:
  98. faces = search_frontface(image)
  99. if faces != ():
  100. faceFound=True
  101. return faces
  102. # Second scan
  103. if lastface == 1:
  104. faces = search_frontface(image)
  105. if faces != ():
  106. lastface = 3
  107. return faces
  108. elif lastface == 2:
  109. faces = search_rightprofile(image)
  110. if faces != ():
  111. lastface = 1
  112. return faces
  113. else:
  114. faces = search_leftprofile(image)
  115. if faces != ():
  116. lastface = 2
  117. return faces
  118. # Third scan
  119. if lastface == 1:
  120. faces = search_leftprofile(image)
  121. if faces != ():
  122. lastface = 2
  123. return faces
  124. elif lastface == 2:
  125. faces = search_frontface(image)
  126. if faces != ():
  127. lastface = 3
  128. return faces
  129. else:
  130. faces = search_rightprofile(image)
  131. if faces != ():
  132. lastface = 1
  133. return faces
  134. return ()
  135. def circlefaces(image, faces):
  136. global lastface
  137. for (x, y, w, h) in faces:
  138. cv2.circle(image, (int(x+w/2), int(y+h/2)), int((w+h)/3), (255, 255, 0), 1)
  139. # Temporary, save the image
  140. cv2.imwrite("tmp/img.{}.facetype{}.png".format(datetime.now().strftime("%Y%m%d.%H%M%S.%f"), lastface), image)
  141. def distance_to_closest(faces):
  142. # Negative values will be left
  143. closestdistance = None
  144. for f in faces:
  145. x,y,w,h = f
  146. print("Face found at {},{} with width {} and height {}.".format(x,y,w,h))
  147. centerpoint = (w/2)+x
  148. distance = centerpoint - cameracenter[0]
  149. if(closestdistance == None or abs(distance) < closestdistance):
  150. print("Face closer to center detected. New target location: {} (ctr: {}) - distance: {}".format(centerpoint, cameracenter[0], distance))
  151. closestdistance = distance
  152. return closestdistance
  153. def send_char(tc):
  154. try:
  155. bus.write_byte(SLAVE_ADDRESS, ord(tc))
  156. except Exception as e:
  157. print("Bus Error while sending {}: {}".format(tc, str(e)))
  158. def stop():
  159. print("STOPPING")
  160. send_char(' ')
  161. return
  162. def left(distance):
  163. send_char('a')
  164. if abs(distance) > 100:
  165. print('GO LEFT')
  166. return
  167. else:
  168. print('GO LEFT FOR {}s'.format(1.0*abs(distance)/100.0))
  169. time.sleep(1.0*abs(distance)/100.0)
  170. stop()
  171. def right(distance=None):
  172. send_char('d')
  173. if distance == None:
  174. print("GO RIGHT")
  175. return
  176. else:
  177. print('GO RIGHT FOR {}s'.format(1.0*abs(distance)/100.0))
  178. time.sleep(1.0*abs(distance)/100.0)
  179. stop()
  180. def fire():
  181. print("FIRING!")
  182. send_char('F')
  183. return
if __name__ == "__main__":
    # Main entry point: capture frames forever, track the face nearest the
    # center line, and drive the turret toward it; fire when centered.
    # allow the camera to warmup
    time.sleep(0.1)
    lastTime = time.time()*1000.0   # ms timestamp of the previous iteration, for the per-frame timing print
    lastAction = None               # state machine: None / "Stop" / "Fire" / "Left" / "Right"
    # capture frames from the camera
    for frame in camera.capture_continuous(rawCapture, format="bgr", use_video_port=True):
        # grab the raw NumPy array representing the image, then initialize the timestamp
        # and occupied/unoccupied text
        print('Time: {}'.format(time.time()*1000.0 - lastTime))
        lastTime = time.time()*1000.0
        image = frame.array
        # image = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY) # convert to greyscale
        if correct_fisheye:
            # balance=0.8 keeps most of the fisheye field of view after correction
            image = undistort(image, 0.8)
        faces = findface(image)
        if faces == ():
            # Nothing in view: halt movement and wait for the next frame.
            print("No face found.")
            stop()
            lastAction = "Stop"
            rawCapture.truncate(0)
            continue
        circlefaces(image, faces)
        # Signed horizontal offset (px) of the nearest face from frame center.
        distance = distance_to_closest(faces)
        if abs(distance) < 15:
            # Target is centered (within 15 px dead band).
            if lastAction == "Fire":
                # Do nothing — just fired; idle briefly.
                time.sleep(1)
            elif lastAction == "Stop":
                # Was already holding still on target: shoot, then cool down.
                lastAction = "Fire"
                fire()
                print("Sleeping for 15 seconds...")
                time.sleep(15)
                print("Resuming...")
            else:
                # Was moving: settle for one frame before deciding to fire.
                lastAction = "Stop"
                stop()
        elif distance < 0:
            lastAction = "Left"
            left(distance)
        elif distance > 0:
            lastAction = "Right"
            right(distance)
        else:
            # Unreachable in practice: distance == 0 is caught by the dead band above.
            print("Face found but no action taken. Distance = {}; Last Action = {}".format(distance, lastAction))
        # clear the stream in preparation for the next frame
        rawCapture.truncate(0)
  231. # horizontal movement
  232. # if abs(travel[0]) < 0.01 and everFound == True:
  233. # # Fire!
  234. # everFound = False # No face found since last firing
  235. # try:
  236. # bus.write_byte(SLAVE_ADDRESS, ord(' '))
  237. # time.sleep(1)
  238. # bus.write_byte(SLAVE_ADDRESS, ord('F'))
  239. # bus.write_i2c_block_data(SLAVE_ADDRESS, ord(' '), [ord('F')])
  240. # print("Sent '{}' to arduino.".format(ord('F')))
  241. # except:
  242. # print("Bus I/O error: {}".format(str(e)))
  243. # continue
  244. #
  245. # if travel[0] < 0 and lastmovement != "right":
  246. # # Move right
  247. # lastmovement = "right"
  248. # print("Moving right.")
  249. # try:
  250. # bus.write_byte(SLAVE_ADDRESS, ord(' '))
  251. # time.sleep(1)
  252. # bus.write_byte(SLAVE_ADDRESS, ord('d'))
  253. # time.sleep(1)
  254. # bus.write_byte(SLAVE_ADDRESS, ord('d'))
  255. # time.sleep(1)
  256. # bus.write_byte(SLAVE_ADDRESS, ord('d'))
  257. # time.sleep(1)
  258. # bus.write_byte(SLAVE_ADDRESS, ord('d'))
  259. # print("Sent '{}' to arduino.".format(ord('d')))
  260. # except Exception as e:
  261. # print("Bus I/O error: {}".format(str(e)))
  262. # continue
  263. #
  264. # if travel[0] > 0 and lastmovement != "left":
  265. # # Move left
  266. # lastmovement = "left"
  267. # print("Moving left.")
  268. # try:
  269. # bus.write_byte(SLAVE_ADDRESS, ord(' '))
  270. # time.sleep(1)
  271. # bus.write_byte(SLAVE_ADDRESS, ord('a'))
  272. # time.sleep(1)
  273. # bus.write_byte(SLAVE_ADDRESS, ord('a'))
  274. # time.sleep(1)
  275. # bus.write_byte(SLAVE_ADDRESS, ord('a'))
  276. # time.sleep(1)
  277. # bus.write_byte(SLAVE_ADDRESS, ord('a'))
  278. # time.sleep(1)
  279. # print("Sent '{}' to arduino.".format(ord('a')))
  280. # except:
  281. # print("Bus I/O error: {}".format(str(e)))
  282. # continue