Ask Your Question

Eisen's profile - activity

2016-01-20 12:38:24 -0600 received badge  Student (source)
2015-09-24 17:09:46 -0600 asked a question Convert PByte to Mat

Hi, I am using a PS3 camera with OpenCV. It works with the PBYTE data type and gives a PBYTE as the frame result. In the SDK samples they use IplImage, but I want to use Mat instead of IplImage. However, I can't convert PBYTE to Mat. How can I convert PBYTE to Mat? Example from the SDK:

    IplImage *pCapImage;
    PBYTE pCapBuffer = NULL;
    cvGetImageRawData(pCapImage, &pCapBuffer);
    while(_running)
        {
            cvGetImageRawData(pCapImage, &pCapBuffer);
            CLEyeCameraGetFrame(_cam, pCapBuffer, (i==0)?2000:0);
}

//Function from SDK

IMPORT(bool) CLEyeCameraGetFrame(CLEyeCameraInstance cam, PBYTE pData, int waitTimeout = 2000);

//MyCode //I use 2 camera.

    cv::Mat pCapImage[2];
    PBYTE pCapBuffer = NULL;
    while(_running){

        for (int i = 0; i < 2; i++)
                {
                                  //trying convert
                    pCapImage[i] = cv::Mat (cvSize(w, h), 4, &pCapBuffer,IPL_DEPTH_8U).clone();

                    CLEyeCameraGetFrame(_cam[i], pCapBuffer);
                }

                Mat pCapImageMatL = pCapImage[0];
                Mat pCapImageMatR = pCapImage[1];
                imshow(_windowNameL, pCapImageMatL);
                imshow(_windowNameR, pCapImageMatR);
}
2015-09-23 21:37:50 -0600 received badge  Supporter (source)
2015-09-16 11:13:56 -0600 asked a question Right method for color object tracking

Hi, I am trying to detect colored balls, like PS3 Move controller balls, from a 2 m distance. I have 10 cameras in the same room hanging from the ceiling. The room is dark and the balls have LEDs inside. I have 4-5 balls (red, blue, green, yellow, pink). I want to track their positions with OpenCV. What's the right method for doing this in OpenCV? Can you give a link or an example for this?

Why am I doing this? I attach these balls to people's heads and track their positions, and then transfer the positions to my program. For example, I will see the red person at x:39 y:69, the blue person at x:49 y:59, and so on.

2015-09-16 02:40:24 -0600 received badge  Enthusiast
2015-09-15 16:53:10 -0600 received badge  Editor (source)
2015-09-15 16:46:16 -0600 commented question Object tracking lag problem

Pls check this. https://www.youtube.com/watch?v=_BKtJ... -- First 10 sec with trackFilteredObject(yellow,threshold,HSV,imgOriginal); functions. After 10 sec. i comment this lines.

2015-09-15 16:22:45 -0600 commented question Object tracking lag problem

colored ping pong balls from 2 meter distance like ps3 move balls.

2015-09-15 15:29:54 -0600 asked a question Object tracking lag problem

Hi, I am using this code to track 4 colored objects. I get video from 8 PS3 Eye cameras. When I comment out my trackFilteredObject line there is no lag, but when I use this code there is a lot of latency. I can't understand why this happens, because my normal CPU usage is ~15% with RAM usage 6.3GB/15GB (40%), and when I run this code CPU usage is ~20-23% with RAM usage 6.4GB. I don't think it is a CPU/RAM performance problem. What am I doing wrong?

Video: https://www.youtube.com/watch?v=_BKtJ... (You can see the lag in the first 10 sec. After 10 sec I comment out the tracking code.)

Note:Kamerasayisi mean cameracount My Track Function:

// Finds every plausible object in the binary `threshold` image, records each
// one's centroid as an Object carrying `theObject`'s type and color, and draws
// the detections onto `cameraFeed`. `HSV` is accepted for interface
// compatibility but is not used here.
//
// BUG FIXES vs. the original:
//  - `objectFound` was reset to false whenever the *last* contour examined was
//    too small, silently discarding earlier valid detections; drawing is now
//    driven by whether any object was actually collected.
//  - The comments promised an upper area bound ("3/2 of the image size =>
//    bad filter") but MAX_OBJECT_AREA was never checked; it is now enforced.
void trackFilteredObject(Object theObject,Mat threshold,Mat HSV, Mat &cameraFeed){
    // Frame geometry used only to derive the largest plausible object area.
    const int FRAME_WIDTH = 5120;
    const int FRAME_HEIGHT = 480;
    // More contours than this means the filter is too noisy to trust.
    const int MAX_NUM_OBJECTS = 50;
    // Minimum and maximum object area, in pixels.
    const int MIN_OBJECT_AREA = 10*10;
    const int MAX_OBJECT_AREA = FRAME_HEIGHT*FRAME_WIDTH/1.5;
    vector <Object> objects;

    // findContours modifies its input, so work on a copy of the threshold image.
    Mat temp;
    threshold.copyTo(temp);
    // These two vectors are needed for the output of findContours.
    vector< vector<Point> > contours;
    vector<Vec4i> hierarchy;
    // Find contours of the filtered image using OpenCV's findContours function.
    findContours(temp,contours,hierarchy,CV_RETR_CCOMP,CV_CHAIN_APPROX_SIMPLE );

    if (hierarchy.size() > 0) {
        const int numObjects = (int)hierarchy.size();
        // If the number of objects is greater than MAX_NUM_OBJECTS the filter is noisy.
        if (numObjects < MAX_NUM_OBJECTS) {
            // Walk the top level of the contour hierarchy (hierarchy[index][0]
            // is the index of the next contour at the same level, -1 at the end).
            for (int index = 0; index >= 0; index = hierarchy[index][0]) {

                // Use the moments method to measure the filtered object;
                // m00 is the contour area.
                Moments moment = moments((cv::Mat)contours[index]);
                double area = moment.m00;

                // Accept only contours bigger than noise (20x20 px) and
                // smaller than a bad-filter blob covering most of the frame.
                if (area > MIN_OBJECT_AREA && area < MAX_OBJECT_AREA) {

                    Object object;

                    // Centroid = (m10/m00, m01/m00).
                    object.setXPos(moment.m10/area);
                    object.setYPos(moment.m01/area);
                    object.setType(theObject.getType());
                    object.setColor(theObject.getColor());

                    objects.push_back(object);
                }
            }
            // Let the user know at least one object was found.
            if (!objects.empty()) {
                // Draw object locations on screen.
                drawObject(objects,cameraFeed,temp,contours,hierarchy);
            }

        } else putText(cameraFeed,"TOO MUCH NOISE! ADJUST FILTER",Point(0,50),1,2,Scalar(0,0,255),2);
    }
}
};

My Main Code:

    void Run()
    {


        int w, h;

        _fps = 30;
        IplImage *pCapImage[kameraSayisi];
        IplImage *pDisplayImage;
        PBYTE pCapBuffer = NULL;
        // Create camera instance
        for(int i = 0; i < kameraSayisi; i++)
        {
            _cam[i] = CLEyeCreateCamera(_cameraGUID[i], _mode, _resolution, _fps);
            if(_cam[i] == NULL) return;
            // Get camera frame dimensions
            CLEyeCameraGetFrameDimensions(_cam[i], w, h);
            // Create the OpenCV images
            pCapImage[i] = cvCreateImage(cvSize(w, h), IPL_DEPTH_8U, 1);

            // Set some camera parameters
            CLEyeSetCameraParameter(_cam[i], CLEYE_GAIN, 0);
            CLEyeSetCameraParameter(_cam[i], CLEYE_EXPOSURE, 511);

            // Start capturing
            CLEyeCameraStart(_cam[i]);


        }
        pDisplayImage = cvCreateImage(cvSize(w*kameraSayisi / 2, h * kameraSayisi/4 ), IPL_DEPTH_8U  ,1);

        if(_cam == NULL)        return;

  int iLastX = -1; 
 int iLastY = -1;



  //Capture a temporary image from the camera
    //program
    bool trackObjects = true;
    bool useMorphOps = true ...
(more)
2015-09-12 15:32:50 -0600 commented question PS3 Eye Camera Color based tracking

could u give an example ? how can i change this line ? IplImage* image = cvCreateImage(cvSize(pCapImage->width, pCapImage->height), IPL_DEPTH_8U, 3);

2015-09-12 06:48:11 -0600 commented question PS3 Eye Camera Color based tracking

What does "outdated C API" mean? What is the new version? Do you have a link about it? I am a newbie with OpenCV; I am just trying to learn from internet samples.

2015-09-12 01:04:49 -0600 asked a question PS3 Eye Camera Color based tracking

Hi, I want to track multiple objects. I bought a 4-camera licence, but I can't find out how to track a color-based object's position. I am trying to change the face tracker code, but when I run this code I see 2 gray screens. Where is my error? My code is in the Run method.

    //////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//
// This file is part of CL-EyeMulticam SDK
//
// C++ CLEyeFaceTracker Sample Application
//
// For updates and file downloads go to: http://codelaboratories.com
//
// Copyright 2008-2012 (c) Code Laboratories, Inc. All rights reserved.
//
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
#include "stdafx.h"
#include <iostream>
#include "opencv2\highgui\highgui.hpp"
#include "opencv2\imgproc\imgproc.hpp"

using namespace cv;
using namespace std;
// Sample camera capture and processing class
class CLEyeCameraCapture
{
    CHAR _windowName[256];
    GUID _cameraGUID;
    CLEyeCameraInstance _cam;
    CLEyeCameraColorMode _mode;
    CLEyeCameraResolution _resolution;
    float _fps;
    HANDLE _hThread;
    bool _running;
public:
    // Stores the capture configuration; the camera itself is created later in
    // Run(). BUG FIXES: the original left _hThread uninitialized and copied
    // windowName with an unbounded strcpy into the fixed 256-byte buffer.
    CLEyeCameraCapture(LPSTR windowName, GUID cameraGUID, CLEyeCameraColorMode mode, CLEyeCameraResolution resolution, float fps) :
    _cameraGUID(cameraGUID), _cam(NULL), _mode(mode), _resolution(resolution), _fps(fps), _hThread(NULL), _running(false)
    {
        // Bounded copy; guarantee NUL termination even for an over-long name.
        strncpy(_windowName, windowName, sizeof(_windowName) - 1);
        _windowName[sizeof(_windowName) - 1] = '\0';
    }
    bool StartCapture()
    {
        _running = true;
        cvNamedWindow(_windowName, CV_WINDOW_AUTOSIZE);
        // Start CLEye image capture thread
        _hThread = CreateThread(NULL, 0, &CLEyeCameraCapture;::CaptureThread, this, 0, 0);
        if(_hThread == NULL)
        {
            MessageBox(NULL,"Could not create capture thread","CLEyeMulticamTest", MB_ICONEXCLAMATION);
            return false;
        }
        return true;
    }
    // Signals the capture thread to stop, waits up to one second for it to
    // exit, then tears down the preview window. No-op when not capturing.
    void StopCapture()
    {
        if (_running)
        {
            _running = false;
            WaitForSingleObject(_hThread, 1000);
            cvDestroyWindow(_windowName);
        }
    }
    void IncrementCameraParameter(int param)
    {
        if(!_cam)    return;
        CLEyeSetCameraParameter(_cam, (CLEyeCameraParameter)param, CLEyeGetCameraParameter(_cam, (CLEyeCameraParameter)param)+10);
    }
    void DecrementCameraParameter(int param)
    {
        if(!_cam)    return;
        CLEyeSetCameraParameter(_cam, (CLEyeCameraParameter)param, CLEyeGetCameraParameter(_cam, (CLEyeCameraParameter)param)-10);
    }
    void Run()
    {
        int w, h;
        IplImage *pCapImage;
        PBYTE pCapBuffer = NULL;
        // Create camera instance
        _cam = CLEyeCreateCamera(_cameraGUID, _mode, _resolution, _fps);
        if(_cam == NULL)        return;
        // Get camera frame dimensions
        CLEyeCameraGetFrameDimensions(_cam, w, h);
        // Depending on color mode chosen, create the appropriate OpenCV image
        if(_mode == CLEYE_COLOR_PROCESSED || _mode == CLEYE_COLOR_RAW)
            pCapImage = cvCreateImage(cvSize(w, h), IPL_DEPTH_8U, 4);
        else
            pCapImage = cvCreateImage(cvSize(w, h), IPL_DEPTH_8U, 1);

        // Set some camera parameters
        CLEyeSetCameraParameter(_cam, CLEYE_GAIN, 10);
        CLEyeSetCameraParameter(_cam, CLEYE_EXPOSURE, 511);

        // Start capturing
        CLEyeCameraStart(_cam);

        CvMemStorage* storage = cvCreateMemStorage(0);
        // Get the current app path
        char strPathName[_MAX_PATH];
        GetModuleFileName(NULL, strPathName, _MAX_PATH);
        *(strrchr(strPathName, '\\') + 1) = '\0';
        // append the xml file name
        strcat(strPathName, "haarcascade_frontalface_default.xml");
        CvHaarClassifierCascade* cascade = cvLoadHaarClassifierCascade(strPathName, cvSize(24, 24));
        IplImage* image = cvCreateImage(cvSize(pCapImage->width, pCapImage->height), IPL_DEPTH_8U, 3);
        IplImage* temp = cvCreateImage(cvSize(pCapImage->width >> 1, pCapImage->height >> 1), IPL_DEPTH_8U, 3);

        //namedWindow("kontrol",CV_WINDOW_AUTOSIZE);
        int Hmindeg=170;
        int Hmaxdeg=179;

        int Smindeg=150;
        int Smaxdeg=255;

        int Vmindeg=60;
        int Vmaxdeg=255;

        /*createTrackbar("minH","kontrol",&Hmindeg;,179);
        createTrackbar("maxH","kontrol",&Hmaxdeg;,179);

        createTrackbar("minS","kontrol",&Smindeg;,255);
        createTrackbar("maxS","kontrol",&Smaxdeg;,255);

        createTrackbar("minV","kontrol",&Vmindeg;,255);
        createTrackbar("maxV","kontrol",&Vmaxdeg;,255);*/

        int eskix = -1;
        int eskiy=-1;
        // image capturing loop
        while(_running)
        {
            cvGetImageRawData(pCapImage, &pCapBuffer;);
            CLEyeCameraGetFrame(_cam, pCapBuffer);

            cvConvertImage(pCapImage, image);

            Mat yeniframe = cvarrToMat(image);

            Mat cizgiresim = Mat::zeros(yeniframe.size(),CV_8UC3);

            Mat HSVres;
            cvtColor(yeniframe,HSVres,CV_BGR2HSV);
            Mat isres;
            inRange(HSVres,Scalar(Hmindeg,Smindeg,Vmindeg),Scalar(Hmaxdeg,Smaxdeg,Vmaxdeg),isres);

            erode(isres,isres,getStructuringElement(MORPH_ELLIPSE,Size(5,5)));
            dilate(isres,isres,getStructuringElement(MORPH_ELLIPSE,Size(5,5)));

            dilate(isres,isres,getStructuringElement(MORPH_ELLIPSE,Size(5,5)));
            erode(isres,isres,getStructuringElement(MORPH_ELLIPSE ...
(more)