In short, I am trying to track a fish in a 30-minute video (see the screenshot below for an example). I set everything up so that tracking should be as easy as possible: I recorded the video with Python, using settings that produce an almost blank image in which only the fish shows up as a dark shape.
I have written a Python script using OpenCV that tracks the fish with a combination of methods: background subtraction, blurring, dilation, and thresholding. However, even with these very low-noise videos I still can't track the fish 100% of the time without also picking up noise. Without background subtraction the fish is tracked 100% of the time, but some darker areas of the tank get tracked as well; with background subtraction the background noise is gone, but the script keeps losing the fish.
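For reference, the variant without background subtraction differs only in how the mask is made; it looked roughly like the snippet below (a minimal sketch: the grayscale conversion, the fixed threshold value of 80, and the use of THRESH_BINARY_INV to keep the dark fish are illustrative here, not my exact settings).

import cv2
import numpy as np

kernel = np.ones((8,8), np.uint8)
cap = cv2.VideoCapture("testvid.mov")
while True:
    flag, frame = cap.read()
    if flag == 0:
        break
    cropped = frame[0:690, 0:1420] #crop image
    gray = cv2.cvtColor(cropped, cv2.COLOR_BGR2GRAY) #convert to grayscale
    img = cv2.GaussianBlur(gray, (15,15), 0) #blur image
    #the fish is dark on a light background, so an inverted binary threshold keeps the dark blob
    img_thresh = cv2.threshold(img, 80, 255, cv2.THRESH_BINARY_INV)[1]
    img_dil = cv2.dilate(img_thresh, kernel, iterations=1) #dilate objects above the threshold
    contours, hierarchy = cv2.findContours(img_dil, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
    #...then the same size filter and centroid code as in the full script below

This picks up the fish in every frame, but also any region of the tank that happens to be darker than the threshold.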
I hope somebody with more experience with this can help me out. I thought that with such a clean video it shouldn't be too hard to do. A section of the 30-minute video can be found at http://mudfooted.com/testvid.mov if you'd like to try it yourself, and this is my latest script:
import os
import cv2
import sys
import glob
import numpy as np
import csv
import shutil
from time import gmtime, strftime
#set minimum and maximum object area (in pixels) for the size filter
MINAREA = 250
MAXAREA = 5000
#set background image subtractor
bgs = cv2.BackgroundSubtractorMOG()
#Get list of video files
files = glob.glob("*.mov")
kernel = np.ones((8,8),np.uint8)
for f in files:
    #Set up writing of the tracked video
    fourcc = cv2.cv.CV_FOURCC('m', 'p', '4', 'v')
    video = cv2.VideoWriter()
    videoname = "".join([f[:-4], "_tracked.avi"])
    video.open(videoname, fourcc, 12, (1420,690), True)
    #Start tracking the new video
    cap = cv2.VideoCapture(f)
    tracklist = []
    trajnr = 0
    lastframe = 0
    #Go through the list of video frames
    while True:
        flag, frame = cap.read()
        if flag == 0: #Something is wrong or the end of the video file was reached
            break
        #Current time in seconds (the video runs at 12 fps)
        currframe = round(cap.get(cv2.cv.CV_CAP_PROP_POS_FRAMES)/12, 2)
        #Stop tracking after 30min
        print currframe
        if currframe > 1800:
            break
        #Modify the image for tracking
        cropped = frame[0:690, 0:1420] #crop image
        img = cv2.GaussianBlur(cropped, (15,15), 0) #blur image
        #Subtract the background from the image
        fgmask = bgs.apply(img, None, 0.05) #a low learning rate accounts for the fish not always moving
        img_thresh = cv2.threshold(fgmask, 0, 255, cv2.THRESH_BINARY)[1] #threshold image
        img_dil = cv2.dilate(img_thresh, kernel, iterations=1) #dilate objects above the threshold
        #Get the list of objects
        contours, hierarchy = cv2.findContours(img_dil, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
        cv2.drawContours(cropped, contours, -1, (255,0,0), 1)
        #Size filter for objects
        for i in range(len(contours)):
            cnt = contours[i]
            M = cv2.moments(cnt)
            area = cv2.contourArea(cnt)
            if (area < MINAREA or area > MAXAREA):
                continue
            else:
                #Start a new trajectory number when there is a gap of more than 1 sec since the last detection
                if trajnr == 0:
                    trajnr = 1
                elif (currframe - lastframe) > 1:
                    trajnr = trajnr + 1
                lastframe = currframe
                #Get the coordinates of the object
                cx = int(M['m10']/M['m00'])
                cy = int(M['m01']/M['m00'])
                #Store the data
                tracklist.append((currframe, cx, cy, trajnr))
                #Draw the contour box and centroid for tracked objects
                rect = cv2.minAreaRect(cnt)
                box = cv2.cv.BoxPoints(rect)
                box = np.int0(box)
                cv2.drawContours(cropped, [box], 0, (0,0,255), 2)
                cv2.circle(cropped, (cx,cy), 0, (255,255,255), thickness=3)
        #Write the tracked frame to video
        video.write(cropped)
        #Show the tracked frame
        cv2.imshow("Tracked image", cropped)
        k = cv2.waitKey(1)
        if k == 27:
            cv2.destroyAllWindows()
            print "User stopped video tracking"
            break
    print tracklist