Failed to implement lkdemo on iOS

I tried to port the C++ lkdemo sample to iOS, using thorrson's lkdemo as a reference. I simply use goodFeaturesToTrack() to detect an initial set of features to track. Everything is processed in the processImage: method, following the iOS documentation, but the features are not tracked properly: the initial features are detected and drawn in the camera view, but they never move afterwards. I stepped through calcOpticalFlowPyrLK in the debugger and it appears to run without errors. Is there something wrong? Here is my processImage: method (the lkdemo frame loop I tried to mirror is sketched after it):

- (void)processImage:(cv::Mat &)image
{

    Mat curntGray, prevGray; // note: these Mats are local, re-created on every call
    cvtColor(image, curntGray, CV_BGR2GRAY);
    if (!isDetected) {

        // detect an initial set of corners and refine them to sub-pixel accuracy
        goodFeaturesToTrack(curntGray, features[1], maxCorners, qualityLevel, minDistance);
        cornerSubPix(curntGray, features[1], subPixWinSize, cv::Size(-1,-1), termcrit);

        isDetected = YES;

        NSLog(@"Feature detected!");

    } else if (!features[0].empty()) {

        vector<uchar> status;
        vector<float> err;

        if(prevGray.empty()) {
            curntGray.copyTo(prevGray);
        }

        // track features[0] from the previous gray frame into the current one
        calcOpticalFlowPyrLK(prevGray, curntGray, features[0], features[1], status, err,
                             winSize, 3, termcrit, 0, 0.001);

        int radius = 10;
        RNG rng(12345);
        Scalar color = Scalar(rng.uniform(0,255), rng.uniform(0,255), rng.uniform(0,255));

        // keep only the points that were tracked successfully and draw them
        size_t i, k;
        for (i = k = 0; i < features[1].size(); i++) {

            if (!status[i])
                continue;

            features[1][k++] = features[1][i];
            circle(image, features[1][i], radius, color, -1, 8);

        }
        features[1].resize(k);

    }

    // push the annotated frame back to the UI on the main thread
    dispatch_async(dispatch_get_main_queue(), ^{
        self.cameraView.image = [UIImage UIImageFromCVMat:image];
    });

    // swap point sets and gray frames for the next call, as in lkdemo
    std::swap(features[0], features[1]);
    cv::swap(curntGray, prevGray);
}
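
For reference, the frame loop I was trying to mirror from the C++ lkdemo sample looks roughly like this (condensed from memory, so it is not the verbatim sample code):

// Condensed from OpenCV's samples/cpp/lkdemo.cpp (from memory, details may
// differ from the exact sample). The key point I tried to mirror: gray,
// prevGray and the two point vectors are declared OUTSIDE the capture loop,
// so they persist from one frame to the next, and the swaps at the bottom
// hand the current frame/points over to the next iteration.
#include <opencv2/opencv.hpp>
#include <vector>

using namespace cv;
using namespace std;

int main()
{
    VideoCapture cap(0);
    TermCriteria termcrit(TermCriteria::COUNT | TermCriteria::EPS, 20, 0.03);
    Size winSize(31, 31), subPixWinSize(10, 10);

    vector<Point2f> points[2];
    Mat gray, prevGray, frame;
    bool needToInit = true;

    for (;;) {
        cap >> frame;
        if (frame.empty())
            break;
        cvtColor(frame, gray, CV_BGR2GRAY);

        if (needToInit) {
            // detect an initial set of corners and refine to sub-pixel accuracy
            goodFeaturesToTrack(gray, points[1], 500, 0.01, 10);
            cornerSubPix(gray, points[1], subPixWinSize, Size(-1, -1), termcrit);
            needToInit = false;
        } else if (!points[0].empty()) {
            vector<uchar> status;
            vector<float> err;
            if (prevGray.empty())
                gray.copyTo(prevGray);
            calcOpticalFlowPyrLK(prevGray, gray, points[0], points[1], status, err,
                                 winSize, 3, termcrit, 0, 0.001);
            // compact the vector down to the successfully tracked points
            size_t i, k;
            for (i = k = 0; i < points[1].size(); i++) {
                if (!status[i])
                    continue;
                points[1][k++] = points[1][i];
                circle(frame, points[1][i], 3, Scalar(0, 255, 0), -1, 8);
            }
            points[1].resize(k);
        }

        imshow("LK Demo", frame);
        if (waitKey(10) >= 0)
            break;

        // carry the current points and frame over to the next iteration
        std::swap(points[1], points[0]);
        cv::swap(prevGray, gray);
    }
    return 0;
}

My processImage: above was meant to be the body of that loop, with one call per camera frame.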