OpenCV bag of features: classification problem

This is my code. It compiles and runs, but the classifier is not predicting correctly: it returns the same result every time, regardless of the evaluation data I feed it. I am not able to figure out where the problem lies. It's quite urgent, so I'd be grateful for any help.

include "stdafx.h"

include <vector>

include <boost filesystem.hpp="">

include <opencv2 opencv.hpp="">

include<stdio.h>

include<conio.h>

using namespace std;
using namespace boost::filesystem;
using namespace cv;

//location of the training data
#define TRAINING_DATA_DIR "data/train/"
//location of the evaluation data
#define EVAL_DATA_DIR "data/eval/"

//See article on BoW model for details
Ptr<FeatureDetector> detector = FeatureDetector::create("SURF");
Ptr<DescriptorExtractor> extractor = DescriptorExtractor::create("SURF");
Ptr<DescriptorMatcher> matcher = DescriptorMatcher::create("FlannBased");

//See article on BoW model for details
int dictionarySize = 1000;
TermCriteria tc(CV_TERMCRIT_ITER + CV_TERMCRIT_EPS, 10, 0.001);
int retries = 1;
int flags = KMEANS_PP_CENTERS;
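// dictionarySize, tc, retries and flags configure the k-means clustering that BOWKMeansTrainer uses to build the vocabulary.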

//See article on BoW model for details
BOWKMeansTrainer bowTrainer(dictionarySize, tc, retries, flags);
//See article on BoW model for details
BOWImgDescriptorExtractor bowDE(extractor, matcher);

/*
CvSVMParams params;
params.svm_type = CvSVM::C_SVC;
params.kernel_type = CvSVM::LINEAR;
params.term_crit = cvTermCriteria(CV_TERMCRIT_ITER, 100, 1e-6);
*/

bool readVocabulary( const string& filename, Mat& vocabulary )
{
    cout << "Reading vocabulary...";
    FileStorage fs( filename, FileStorage::READ );
    if( fs.isOpened() )
    {
        fs["vocabulary"] >> vocabulary;
        cout << "done" << endl;
        return true;
    }
    return false;
}

bool writeVocabulary( const string& filename, const Mat& vocabulary )
{
    cout << "Saving vocabulary..." << endl;
    FileStorage fs( filename, FileStorage::WRITE );
    if( fs.isOpened() )
    {
        fs << "vocabulary" << vocabulary;
        return true;
    }
    return false;
}

/**
 * \brief Recursively traverses a folder hierarchy. Extracts features from the
 * training images and adds them to the bowTrainer.
 */
void extractTrainingVocabulary(const path& basepath) {
    for (directory_iterator iter = directory_iterator(basepath); iter != directory_iterator(); iter++) {
        directory_entry entry = *iter;

    if (is_directory(entry.path())) {

        cout << "Processing directory " << entry.path().string() << endl;
        extractTrainingVocabulary(entry.path());

    } else {

        path entryPath = entry.path();
        if (entryPath.extension() == ".jpg") {

            cout << "Processing file " << entryPath.string() << endl;
            Mat img = imread(entryPath.string(),0);
            if (!img.empty()) {
                vector<KeyPoint> keypoints;
                detector->detect(img, keypoints);
                if (keypoints.empty()) {
                    cerr << "Warning: Could not find key points in image: "
                            << entryPath.string() << endl;
                } else {
                    Mat features;
                    extractor->compute(img, keypoints, features);
                    bowTrainer.add(features);
                }
            } else {
                cerr << "Warning: Could not read image: "
                        << entryPath.string() << endl;
            }

        }
    }
}

}

/**
 * \brief Recursively traverses a folder hierarchy. Creates a BoW descriptor for each image encountered.
 */
void extractBOWDescriptor(const path& basepath, Mat& descriptors, Mat& labels) {
    for (directory_iterator iter = directory_iterator(basepath); iter != directory_iterator(); iter++) {
        directory_entry entry = *iter;
        if (is_directory(entry.path())) {
            cout << "Processing directory " << entry.path().string() << endl;
            extractBOWDescriptor(entry.path(), descriptors, labels);
        } else {
            path entryPath = entry.path();
            if (entryPath.extension() == ".jpg") {
                cout << "Processing file " << entryPath.string() << endl;
                Mat img = imread(entryPath.string(), 0);
                if (!img.empty()) {
                    vector<KeyPoint> keypoints;
                    detector->detect(img, keypoints);
                    if (keypoints.empty()) {
                        cerr << "Warning: Could not find key points in image: "
                                << entryPath.string() << endl;
                    } else {
                        Mat histogram;
                        bowDE.compute(img, keypoints, histogram);
                        descriptors.push_back(histogram);
                        float label = atof(entryPath.filename().string().c_str());
                        labels.push_back(label);
                    }
                } else {
                    cerr << "Warning: Could not read image: "
                            << entryPath.string() << endl;
                }
            }
        }
    }

}

int main(int argc, char ** argv) {
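// Load an existing vocabulary from k.txt if present; otherwise collect SURF descriptors
// from the training images and cluster them into the BoW dictionary.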

cout<<"Creating dictionary..."<<endl;
Mat dictionary;
if( !readVocabulary( "k.txt", dictionary) )
{
extractTrainingVocabulary(path(TRAINING_DATA_DIR));
vector<Mat> descriptors = bowTrainer.getDescriptors();
int count=0;
for(vector<Mat>::iterator iter=descriptors.begin();iter!=descriptors.end();iter++)
{
    count+=iter->rows;
}
cout<<"Clustering "<<count<<" features"<<endl;

 dictionary = bowTrainer.cluster();
if( !writeVocabulary("k.txt", dictionary) )
    {
        cout << "Error: file k.txt" << " can not be opened to write" << endl;
        exit(-1);
    }
}
else
    bowDE.setVocabulary(dictionary);


cout<<"Processing training data..."<<endl;
Mat trainingData(0, dictionarySize, CV_32FC1);
Mat labels(0, 1, CV_32FC1);
extractBOWDescriptor(path(TRAINING_DATA_DIR), trainingData, labels);
if( !writeVocabulary("Histogram.txt", trainingData) )
    {
        cout << "Error: file Histogram.txt" << " can not be opened to write" << endl;
        exit(-1);
    }


CvNormalBayesClassifier classifier;
Mat temp;
cout<<"Training classifier..."<<endl;
if( !readVocabulary( "BayesDictalology.xml", temp) ){
    classifier.train(trainingData, labels);
    classifier.save("BayesDictalology.xml","BayesDictalology");
}
else
    classifier.load("BayesDictalology.xml","BayesDictalology");
//CvSVM SVM;
//SVM.train(trainingData,labels);
cout<<"Processing evaluation data..."<<endl;
Mat evalData(0, dictionarySize, CV_32FC1);
Mat groundTruth(0, 1, CV_32FC1);
extractBOWDescriptor(path(EVAL_DATA_DIR), evalData, groundTruth);

cout<<"Evaluating classifier..."<<endl;
Mat results; 
//cout<<SVM.predict(evalData);
classifier.predict(evalData, &results);
int rows=results.rows;
int columns=results.cols;
for(int i=0;i<rows;i++)
{
    for(int j=0;j<columns;j++)
        cout<<" "<<results.at<float>(i,j);
    cout<<endl;

}
_getche();
double errorRate = (double) countNonZero(groundTruth - results) / evalData.rows;
cout << "Error rate: " << errorRate << endl;
_getche();

}
