Ask Your Question

Revision history [back]

click to hide/show revision 1
initial version

How to Surf Descriptors as input for a SVM Classifier

I am using the Java implementation to build a SVM classifier to classify tree leaves. I get the following errors when I try to train the SVM:

OpenCV Error: Sizes of input arguments do not match () in cv::Mat::push_back, file ........\opencv\modules\core\src\matrix.cpp, line 650 CvException [org.opencv.core.CvException: cv::Exception: ........\opencv\modules\core\src\matrix.cpp:650: error: (-209) in function cv::Mat::push_back ] at org.opencv.core.Mat.n_push_back(Native Method) at org.opencv.core.Mat.push_back(Mat.java:1863) at mark.smart.csc7057.surfdetectorandsvm.SurfDetectorAndSVM.main(SurfDetectorAndSVM.java:126) OpenCV Error: Sizes of input arguments do not match () in cv::Mat::push_back, file ........\opencv\modules\core\src\matrix.cpp, line 650 CvException [org.opencv.core.CvException: cv::Exception: ........\opencv\modules\core\src\matrix.cpp:650: error: (-209) in function cv::Mat::push_back ] at org.opencv.core.Mat.n_push_back(Native Method) at org.opencv.core.Mat.push_back(Mat.java:1863) at mark.smart.csc7057.surfdetectorandsvm.SurfDetectorAndSVM.main(SurfDetectorAndSVM.java:167)

OpenCV Error: Bad argument (There is only a single class) in cvPreprocessCategoricalResponses, file ........\opencv\modules\ml\src\inner_functions.cpp, line 729 Exception in thread "main" CvException [org.opencv.core.CvException: cv::Exception: ........\opencv\modules\ml\src\inner_functions.cpp:729: error: (-5) There is only a single class in function cvPreprocessCategoricalResponses ] at org.opencv.ml.CvSVM.train_0(Native Method) at org.opencv.ml.CvSVM.train(CvSVM.java:270) at mark.smart.csc7057.surfdetectorandsvm.SurfDetectorAndSVM.main(SurfDetectorAndSVM.java:228)

I have read that a BOW trainer should be employed to cluster the surf descriptors, unfortunately this feature is currently not available with the java implementation. I would appreciate any guidance (with sample code if possible) to resolve this problem. The code I have used is as follows:

public static void main(String[] args) { System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

File leafImages = null;

String leafImageAbsPathAsStr = "";

File[] leafImageFilePaths;

File negImages = null;

String negImageAbsPathAsStr = "";

File[] negImageFilePaths;

final int leafImageWidth = 50; final int leafImageHeight = 50;

final int size = 200;

Mat leafGrayMat;
Mat leafBinMat = new Mat();

Mat negGrayMat; Mat negBinMat = new Mat();

Mat SVMtrainingData = new Mat(size, 1, CvType.CV_32FC1);

Mat labels = new Mat(size, 1, CvType.CV_32FC1);

List<float> trainingLabels = new ArrayList<float>();

MatOfKeyPoint keyPoints = new MatOfKeyPoint(); MatOfKeyPoint descriptors = new MatOfKeyPoint();

FeatureDetector featureDetector = FeatureDetector.create(FeatureDetector.SURF);

DescriptorExtractor descriptorExtractor = DescriptorExtractor.create(DescriptorExtractor.SURF);

try { leafImages = new File("Images//acer_campestre_100_images");

leafImageFilePaths = leafImages.listFiles();

for(File path : leafImageFilePaths)
{
  leafImageAbsPathAsStr = path.getAbsolutePath();

  leafGrayMat = Highgui.imread(leafImageAbsPathAsStr, Highgui.CV_LOAD_IMAGE_GRAYSCALE);

  Imgproc.adaptiveThreshold(leafGrayMat, leafBinMat, 255, Imgproc.ADAPTIVE_THRESH_GAUSSIAN_C,    Imgproc.THRESH_BINARY, 55, 5);

  featureDetector.detect(leafBinMat, keyPoints);

  descriptorExtractor.compute(leafBinMat, keyPoints, descriptors);

  descriptors.convertTo(descriptors, CvType.CV_32FC1);

  SVMtrainingData.push_back(descriptors);

  trainingLabels.add(1.0f);
}

} catch(Exception e) { e.printStackTrace(); } try { negImages = new File("Images//neg_images_100");

   negImageFilePaths = negImages.listFiles();

   for(File path : negImageFilePaths)
   {
 negImageAbsPathAsStr = path.getAbsolutePath();

 negGrayMat = Highgui.imread(negImageAbsPathAsStr, Highgui.CV_LOAD_IMAGE_GRAYSCALE);

 Imgproc.adaptiveThreshold(negGrayMat, negBinMat, 255, Imgproc.ADAPTIVE_THRESH_GAUSSIAN_C, Imgproc.THRESH_BINARY, 55, 5);

 featureDetector.detect(leafBinMat, keyPoints);

 descriptorExtractor.compute(leafBinMat, keyPoints, descriptors);

 SVMtrainingData.push_back(descriptors);

 trainingLabels.add(-1.0f);
  }

} catch(Exception e) { e.printStackTrace(); } Float[] trainingLabelsArray = trainingLabels.toArray(new Float[trainingLabels.size()]);

   float[] trainLabels = new float[trainingLabelsArray.length];
   for(int i = 0; i < trainingLabelsArray.length; i++)
   {
 trainLabels[i] = trainingLabelsArray[i];
   }

   for(int i = 0; i < trainingLabels.size(); i++)
   {
 labels.put(i, 1, trainLabels[i]);
   }    

CvSVMParams params = new CvSVMParams();
params.set_svm_type(CvSVM.C_SVC);
params.set_kernel_type(CvSVM.LINEAR);
params.set_degree(0);
params.set_gamma(1);
params.set_coef0(0);
params.set_C(1);
params.set_nu(0);
params.set_p(0);
TermCriteria tc = new TermCriteria(TermCriteria.EPS, 1000, 0.01);
params.set_term_crit(tc);

CvSVM svmClassifier = new CvSVM();
svmClassifier.train(SVMtrainingData, labels, new Mat(), new Mat(), params);
svmClassifier.save("C://Users//mark//Documents//CSC7057_Individual_Project_For_MSc//acer_campestre_classifier.xml");

}