# facetracker.py — Raspberry Pi camera face tracker for an I2C-driven turret.
  1. #! /usr/bin/python3
  2. from picamera.array import PiRGBArray
  3. from picamera import PiCamera
  4. from datetime import datetime
  5. import time
  6. import cv2
  7. import sys
  8. import imutils
  9. import np
  10. import math # for sqrt distance formula
  11. import random
  12. # i2c stuff
  13. import smbus
  14. bus = smbus.SMBus(1)
  15. SLAVE_ADDRESS = 0x04
  16. time_before_scan = 3*60*1000.0
  17. time_after_scan = (3*60+15)*1000.0
# Create the haar cascade
#frontalfaceCascade = cv2.CascadeClassifier("haarcascade_frontalface_default.xml")
frontalfaceCascade = cv2.CascadeClassifier("frontalface_fromweb.xml")
profilefaceCascade = cv2.CascadeClassifier("haarcascade_profileface.xml")
face = [0,0,0,0] # This will hold the array that OpenCV returns when it finds a face: (makes a rectangle)
center = [0,0] # Center of the face: a point calculated from the above variable
# lastface is an int 1-3 used to speed up detection.  The script is looking
# for a right profile face, a left profile face, or a frontal face; rather
# than searching for all three every time, it uses this variable to remember
# which it last saw, and looks for that again.  If it doesn't find it, it's
# set back to zero and on the next loop it will search for all three.  This
# basically triples the detect speed so long as the face hasn't moved much.
lastface = 0
scanleft = True # Should we scan for left profiles?
scanright = True # should we scan for right profiles?
# initialize the camera and grab a reference to the raw camera capture
camera = PiCamera()
#camera.resolution = (160, 120)
#camera.resolution = (640,480)
camera.resolution = (1024,768)
cameracenter = (camera.resolution[0]/2, camera.resolution[1]/2)  # pixel center of the frame
camera.framerate = 32
rawCapture = PiRGBArray(camera, camera.resolution)
# Points to the last place we saw a face
target = ( camera.resolution[0]/2, camera.resolution[1]/2 )
# Fisheye corrections. See https://medium.com/@kennethjiang/calibrate-fisheye-lens-using-opencv-333b05afa0b0
# 640x480 calibration (kept for reference, unused at the current resolution):
#correct_fisheye = False
#DIM=(640, 480)
#K=np.array([[363.787052141742, 0.0, 332.09761373599576], [0.0, 362.23769923959975, 238.35982850966641], [0.0, 0.0, 1.0]])
#D=np.array([[-0.019982864934848042], [-0.10107557279423625], [0.20401597940960342], [-0.1406464201639892]])
# 1024x768 calibration: K is the camera intrinsic matrix, D the fisheye
# distortion coefficients, both produced by the calibration procedure above.
correct_fisheye = True
DIM=(1024, 768)
K=np.array([[583.6639649321671, 0.0, 518.0139106134624], [0.0, 580.8039721094127, 384.32095600935503], [0.0, 0.0, 1.0]])
D=np.array([[0.0028045742945672475], [-0.14423839478882694], [0.23715105072799644], [-0.1400677375634837]])
  52. #def distance(p0, p1):
  53. # return math.sqrt((p0[0] - p1[0])**2 + (p0[1] - p1[1])**2)
  54. def search_rightprofile(i):
  55. return ()
  56. # return profilefaceCascade.detectMultiScale(i,1.3,4,(cv2.CV_HAAR_DO_CANNY_PRUNING + cv2.CV_HAAR_FIND_BIGGEST_OBJECT + cv2.CV_HAAR_DO_ROUGH_SEARCH),(30,30))
  57. if scanright:
  58. return profilefaceCascade.detectMultiScale(i)
  59. return profilefaceCascade.detectMultiScale(i, maxSize=(30,30))
  60. else:
  61. return ()
  62. def search_leftprofile(i):
  63. return ()
  64. if scanleft:
  65. revimage = cv2.flip(i, 1) # Flip the image
  66. # return profilefaceCascade.detectMultiScale(i,1.3,4,(cv2.CV_HAAR_DO_CANNY_PRUNING + cv2.CV_HAAR_FIND_BIGGEST_OBJECT + cv2.CV_HAAR_DO_ROUGH_SEARCH),(30,30))
  67. return profilefaceCascade.detectMultiScale(i)
  68. return profilefaceCascade.detectMultiScale(i, maxSize=(30,30))
  69. else:
  70. return ()
  71. def search_frontface(i):
  72. # return frontalfaceCascade.detectMultiScale(i,1.3,4,(cv2.CV_HAAR_DO_CANNY_PRUNING + cv2.CV_HAAR_FIND_BIGGEST_OBJECT + cv2.CV_HAAR_DO_ROUGH_SEARCH),(30,30))
  73. return frontalfaceCascade.detectMultiScale(i)
  74. return frontalfaceCascade.detectMultiScale(i, maxSize=(30,30))
def undistort(i, balance=0.0, dim2=None, dim3=None):
    """Remove fisheye distortion from image i using the module-level
    calibration constants K, D and DIM (see the Medium article linked above).

    balance: 0.0 crops to the fully-valid region, 1.0 keeps the whole
    (curved-edge) field of view.
    dim2/dim3: optional intermediate/output dimensions; default to i's own.
    Returns the remapped (undistorted) image.
    """
    # Sanity Check the source dimensions
    dim1 = i.shape[:2][::-1] #dim1 is the dimension of input image to un-distort
    # NOTE(review): float-division aspect comparison; exact only for sizes
    # that are integer scalings of DIM, which is the supported use here.
    assert dim1[0]/dim1[1] == DIM[0]/DIM[1], "Image to undistort needs to have same aspect ratio as the ones used in calibration"
    if not dim2:
        dim2 = dim1
    if not dim3:
        dim3 = dim1
    scaled_K = K * dim1[0] / DIM[0] # The values of K is to scale with image dimension.
    scaled_K[2][2] = 1.0 # Except that K[2][2] is always 1.0
    # This is how scaled_K, dim2 and balance are used to determine the final K used to un-distort image. OpenCV document failed to make this clear!
    new_K = cv2.fisheye.estimateNewCameraMatrixForUndistortRectify(scaled_K, D, dim2, np.eye(3), balance=balance)
    map1, map2 = cv2.fisheye.initUndistortRectifyMap(scaled_K, D, np.eye(3), new_K, dim3, cv2.CV_16SC2)
    return cv2.remap(i, map1, map2, interpolation=cv2.INTER_LINEAR, borderMode=cv2.BORDER_CONSTANT)
  89. def findface(image):
  90. global lastface
  91. global correct_fisheye
  92. faces = ();
  93. # TODO: There is a better way to do this. Find it.
  94. # First Scan
  95. if lastface == 1:
  96. faces = search_rightprofile(image)
  97. if faces != ():
  98. lastface = 1
  99. return faces
  100. elif lastface == 2:
  101. faces = search_leftprofile(image)
  102. if faces != ():
  103. lastface = 2
  104. return faces
  105. else:
  106. faces = search_frontface(image)
  107. if faces != ():
  108. faceFound=True
  109. return faces
  110. # Second scan
  111. if lastface == 1:
  112. faces = search_frontface(image)
  113. if faces != ():
  114. lastface = 3
  115. return faces
  116. elif lastface == 2:
  117. faces = search_rightprofile(image)
  118. if faces != ():
  119. lastface = 1
  120. return faces
  121. else:
  122. faces = search_leftprofile(image)
  123. if faces != ():
  124. lastface = 2
  125. return faces
  126. # Third scan
  127. if lastface == 1:
  128. faces = search_leftprofile(image)
  129. if faces != ():
  130. lastface = 2
  131. return faces
  132. elif lastface == 2:
  133. faces = search_frontface(image)
  134. if faces != ():
  135. lastface = 3
  136. return faces
  137. else:
  138. faces = search_rightprofile(image)
  139. if faces != ():
  140. lastface = 1
  141. return faces
  142. return ()
  143. def circlefaces(image, faces):
  144. global lastface
  145. for (x, y, w, h) in faces:
  146. cv2.circle(image, (int(x+w/2), int(y+h/2)), int((w+h)/3), (255, 255, 0), 1)
  147. # Temporary, save the image
  148. if random.randint(1, 10) == 1:
  149. # Save 1 out of 10
  150. cv2.imwrite("tmp/img.{}.facetype{}.png".format(datetime.now().strftime("%Y%m%d.%H%M%S.%f"), lastface), image)
  151. def distance_to_closest(faces):
  152. # Negative values will be left
  153. closestdistance = None
  154. for f in faces:
  155. x,y,w,h = f
  156. print("Face found at {},{} with width {} and height {}.".format(x,y,w,h))
  157. centerpoint = (w/2)+x
  158. distance = centerpoint - cameracenter[0]
  159. if(closestdistance == None or abs(distance) < closestdistance):
  160. print("Face closer to center detected. New target location: {} (ctr: {}) - distance: {}".format(centerpoint, cameracenter[0], distance))
  161. closestdistance = distance
  162. return closestdistance
  163. def send_char(tc):
  164. try:
  165. bus.write_byte(SLAVE_ADDRESS, ord(tc))
  166. except Exception as e:
  167. print("Bus Error while sending {}: {}".format(tc, str(e)))
  168. def stop():
  169. print("STOPPING")
  170. send_char(' ')
  171. return
  172. def left(distance):
  173. send_char('a')
  174. if abs(distance) > 300:
  175. print('GO LEFT')
  176. return
  177. elif abs(distance) > 50:
  178. print('GO LEFT FOR {}s (divisior 175)'.format(1.0*abs(distance)/175.0))
  179. time.sleep(1.0*abs(distance)/175.0)
  180. stop()
  181. else:
  182. print('GO LEFT FOR {}s (divisor 100)'.format(1.0*abs(distance)/100.0))
  183. time.sleep(1.0*abs(distance)/100.0)
  184. stop()
  185. def right(distance=None):
  186. send_char('d')
  187. if abs(distance) > 300:
  188. print("GO RIGHT")
  189. return
  190. elif abs(distance) > 50:
  191. print('GO RIGHT FOR {}s (divisior 175)'.format(1.0*abs(distance)/175.0))
  192. time.sleep(1.0*abs(distance)/175.0)
  193. stop()
  194. else:
  195. print('GO RIGHT FOR {}s (divisor 100)'.format(1.0*abs(distance)/100.0))
  196. time.sleep(1.0*abs(distance)/100.0)
  197. stop()
  198. def fire():
  199. print("FIRING!")
  200. send_char('F')
  201. return
if __name__ == "__main__":
    # Main tracking loop.  State machine driven by lastAction, one of:
    # None / "Scan" / "Stop" / "Fire" / "Left" / "Right".
    # allow the camera to warmup
    time.sleep(0.1)
    lastTime = time.time()*1000.0   # timestamp of the previous loop pass, ms
    lastDetected = lastTime         # when a face was last seen, ms
    lastAction = None
    totalFound = 0 # We don't want to fire too soon.
    # capture frames from the camera
    for frame in camera.capture_continuous(rawCapture, format="bgr", use_video_port=True):
        # grab the raw NumPy array representing the image, then initialize the timestamp
        # and occupied/unoccupied text
        timeSinceDetected = 1000.0*time.time() - lastDetected
        print('Time: {}; Time since detected: {} ({}/{})'.format(time.time()*1000.0 - lastTime, timeSinceDetected, time_before_scan, time_after_scan))
        lastTime = time.time()*1000.0
        image = frame.array
        # image = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY) # convert to greyscale
        if correct_fisheye:
            image = undistort(image, 0.8)
        if lastAction == "Fire":
            # Just fired last pass: save the pic, sleep, and then resume
            lastAction = "Stop"
            cv2.imwrite("tmp/img.{}.FIRED.png".format(datetime.now().strftime("%Y%m%d.%H%M%S.%f")), image)
            print("Sleeping for 15 seconds...")
            time.sleep(15)
            print("Resuming...")
            rawCapture.truncate(0)
            continue
        faces = findface(image)
        if faces == ():
            # No face this frame: after time_before_scan ms of nothing, sweep
            # right until time_after_scan ms, then give up and sit still.
            print("No face found.")
            rawCapture.truncate(0)
            if timeSinceDetected > time_before_scan and timeSinceDetected < time_after_scan:
                if lastAction != "Scan":
                    lastAction = "Scan"
                    print("Beginning Scanning...")
                    right(1000.0) # 1000 is arbitrary
                # Otherwise just keep doing what youre doing
            elif timeSinceDetected > time_after_scan:
                # Scan window exhausted: stop and reset the detection clock.
                lastAction = "Stop"
                print("Stopping scanning...")
                lastDetected = time.time()*1000.0
                stop()
            else:
                lastAction = "Stop"
                stop()
            continue
        # A face was found this frame.
        totalFound += 1
        lastDetected = time.time() * 1000.0
        circlefaces(image, faces)
        distance = distance_to_closest(faces)
        if lastAction == "Scan":
            # Scanning, but detected a face. Stop and continue
            print("Face detected. Aborted scanning.")
            stop()
            lastAction = "Stop"
        elif abs(distance) < 15:
            # Face is near-centered; only fire after >= 5 consecutive sightings.
            if lastAction == "Fire":
                # Do nothing
                time.sleep(1)
            elif lastAction == "Stop" and totalFound >= 5:
                totalFound = 0 # Restart the counter
                lastAction = "Fire"
                cv2.imwrite("tmp/img.{}.ABOUTTOFIRE.png".format(datetime.now().strftime("%Y%m%d.%H%M%S.%f")), image)
                fire()
            else:
                lastAction = "Stop"
                stop()
        elif distance < 0:
            lastAction = "Left"
            left(distance)
        elif distance > 0:
            lastAction = "Right"
            right(distance)
        else:
            print("Face found but no action taken. Distance = {}; Last Action = {}".format(distance, lastAction))
        # clear the stream in preparation for the next frame
        rawCapture.truncate(0)