How to send a live stream (UDP) of frames captured by OpenCV

How do I send cv::Mat frames over the network using socket programming? I did not get any output when I sent the Mat frames as such, nor when I sent them as raw pixels, so kindly help me.
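
For reference, the usual pattern for sending a cv::Mat as raw pixels is to make it continuous and treat its pixel buffer as a flat array of frame.total()*frame.elemSize() bytes; the receiver then wraps the same bytes in a new Mat of the agreed size and type. A minimal round-trip sketch of that idea (no networking involved; the 480x640 frame here is just a stand-in) is:

#include <vector>
#include "opencv2/core.hpp"

int main()
{
    cv::Mat frame(480, 640, CV_8UC3, cv::Scalar(0, 128, 255)); // stand-in for a captured frame
    if (!frame.isContinuous())
        frame = frame.clone();                                  // guarantee one contiguous pixel block
    size_t imgSize = frame.total() * frame.elemSize();          // bytes that would go on the wire
    std::vector<uchar> buffer(frame.data, frame.data + imgSize);

    // receiving side: rebuild a Mat of the agreed size/type around the raw bytes (no copy)
    cv::Mat rebuilt(frame.rows, frame.cols, frame.type(), buffer.data());
    return cv::countNonZero(rebuilt.reshape(1) != frame.reshape(1)) == 0 ? 0 : 1;
}

Both ends have to agree on the rows, columns and pixel type out of band, because none of that metadata travels inside the raw buffer.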

I created these programs as a client and a server for broadcasting (UDP) of frames. I get a segmentation fault while running the client. Code for the client:

//basic
#include <iostream>
#include <stdio.h>
#include<string.h>
#include<stdio.h>
#include<stdlib.h>
#include<unistd.h>
#include<errno.h>
#include<string.h>
//opencv libraries
#include "opencv2/objdetect.hpp"
#include "opencv2/highgui.hpp"
#include "opencv2/imgproc.hpp"
//socket libraries
#include<sys/types.h> 
#include<sys/socket.h>
#include<netinet/in.h>
#include<arpa/inet.h>
#include<netdb.h>
#define SERVERPORT "4950"   // the port users will be connecting to


using namespace std;
using namespace cv;

void detectAndDisplay( Mat frame , Mat frame_edit);
int talker(Mat frame);
/** Global variables */
String face_cascade_name = "lbpcascade_frontalface.xml";
String eyes_cascade_name = "haarcascade_eye_tree_eyeglasses.xml";
CascadeClassifier face_cascade;
CascadeClassifier eyes_cascade;


/** @function main */
int main( void ) 
{
VideoCapture capture;
Mat frame;
if( !face_cascade.load( face_cascade_name ) ){ printf("--(!)Error loading face cascade\n"); return -1; };
if( !eyes_cascade.load( eyes_cascade_name ) ){ printf("--(!)Error loading eyes cascade\n"); return -1; };
//-- 2. Read the video stream
capture.open( 1 );
if ( ! capture.isOpened() ) { printf("--(!)Error opening video capture\n"); return -1; }
capture.read(frame);
Mat frame_edit(frame.rows+100,frame.cols,CV_8UC3,Scalar::all(0));
while ( capture.read(frame) )
{
        if( frame.empty() )
        {
            printf(" --(!) No captured frame -- Break!");
            break;
        }
int k=100;
for(int i=0;i<frame.rows;i++)
{
    uchar* data=frame.ptr<uchar>(i);
    uchar* data1=frame_edit.ptr<uchar>(k);
    for(int l=0;l<frame.cols*frame.channels();l++)
        {   
            data1[l]=data[l];
        }
    k++;
}
    //-- 3. show frames
detectAndDisplay( frame_edit, frame );
    int c = waitKey(5);
    if( (char)c == 27 ) { break; } // escape
}
return 0;
}

/**
 * @function detectAndDisplay
 */
 void detectAndDisplay( Mat frame ,Mat frame_edit)
{
std::vector<Rect> faces;
Mat frame_gray;
cvtColor( frame_edit, frame_gray, COLOR_BGR2GRAY );
equalizeHist( frame_gray, frame_gray );
//-- Detect faces
face_cascade.detectMultiScale( frame_gray, faces, 1.1, 2, 0, Size(80, 80) );
for( size_t i = 0; i < faces.size(); i++ )
{
        Mat faceROI = frame_gray( faces[i] );
        std::vector<Rect> eyes;
    //-- In each face, detect eyes
        eyes_cascade.detectMultiScale( faceROI, eyes, 1.1, 2, 0 |CASCADE_SCALE_IMAGE, Size(30, 30) );
        if( eyes.size() == 2)
        {
        //-- Draw the face
            Point edge1(faces[i].x,faces[i].y+100),edge2(faces[i].x+faces[i].width,faces[i].y+faces[i].height+100);
            rectangle(frame,edge1,edge2,Scalar(255,255,255),1,8);

        }
}
putText(frame,"Signal level:",Point(0,50),1,2,Scalar::all(255),2);
putText(frame,"Battery Remaining:",Point(280,50),1,2,Scalar::all(255),2);
//-- Show what you got
imshow( "face detection client", frame );
int ret=talker(frame);
if(ret==1||ret==2)
{
    cout<<"error";
    exit(1);
}   
}

int talker(Mat frame)

{
int sockfd;
const char *argv[1]={"localhost"};
struct addrinfo hints, *servinfo, *p;
int rv;
int numbytes;
frame = (frame.reshape(0,1)); // to make it continuous
int imgSize = frame.total()*frame.elemSize();
memset(&hints, 0, sizeof hints);
hints.ai_family = AF_UNSPEC;
hints.ai_socktype = SOCK_DGRAM;

if ((rv = getaddrinfo(argv[1], SERVERPORT, &hints, &servinfo)) != 0)
{
    fprintf(stderr, "getaddrinfo: %s\n", gai_strerror(rv));
    return 1;
}
    // loop through all the results and make a socket
for(p = servinfo; p != NULL; p = p->ai_next)
{
    if ((sockfd = socket(p->ai_family, p->ai_socktype, p->ai_protocol)) == -1)
    {
        perror("talker: socket");
        continue;
    }
    break;
}
if (p == NULL)
{
    fprintf(stderr, "talker: failed to bind socket\n");
    return 2;
}
if ((numbytes = sendto(sockfd, frame.data, imgSize, 0, p->ai_addr, p->ai_addrlen)) == -1)
{
    perror("talker: sendto");
    exit(1);
}
freeaddrinfo(servinfo);
cout << "sending frame successful" << endl;
close(sockfd);
return 0;
}
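
The server side is not shown in the post. A minimal receiver sketch, written here only to illustrate the other end of the exchange, follows; the port matches SERVERPORT, but the frame geometry (120x160, CV_8UC3) is an assumption chosen so that one whole frame (57,600 bytes) fits inside a single UDP datagram, whose payload tops out around 65,507 bytes. A raw camera frame such as 640x480x3 (921,600 bytes) does not fit and would have to be split into chunks or compressed before sendto() can succeed.

//receiver (illustrative sketch only, not the original server)
#include <cstring>
#include <vector>
#include <unistd.h>
#include <sys/types.h>
#include <sys/socket.h>
#include <netdb.h>
#include "opencv2/highgui.hpp"
#define SERVERPORT "4950"   // must match the client

int main()
{
    struct addrinfo hints, *servinfo, *p;
    memset(&hints, 0, sizeof hints);
    hints.ai_family = AF_UNSPEC;
    hints.ai_socktype = SOCK_DGRAM;
    hints.ai_flags = AI_PASSIVE;                     // bind to any local address
    if (getaddrinfo(NULL, SERVERPORT, &hints, &servinfo) != 0) return 1;

    int sockfd = -1;
    for (p = servinfo; p != NULL; p = p->ai_next)
    {
        sockfd = socket(p->ai_family, p->ai_socktype, p->ai_protocol);
        if (sockfd == -1) continue;
        if (bind(sockfd, p->ai_addr, p->ai_addrlen) == 0) break;
        close(sockfd);
    }
    freeaddrinfo(servinfo);
    if (p == NULL) return 2;

    const int rows = 120, cols = 160;                // assumed geometry, agreed out of band
    std::vector<uchar> buf(rows * cols * 3);         // one CV_8UC3 frame = 57,600 bytes
    ssize_t n = recvfrom(sockfd, buf.data(), buf.size(), 0, NULL, NULL);
    if (n == (ssize_t)buf.size())
    {
        cv::Mat frame(rows, cols, CV_8UC3, buf.data()); // wraps buf, no copy
        cv::imshow("received frame", frame);
        cv::waitKey(0);
    }
    close(sockfd);
    return 0;
}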

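
Since a full raw frame is far larger than one datagram, a common workaround (not used in the code above, mentioned only as an alternative) is to JPEG-compress each frame with cv::imencode before sending; the encoded buffer for a VGA frame usually fits under the 65,507-byte limit. A hedged sketch of such a helper, assuming sockfd and the destination address were obtained the same way as in talker():

//hypothetical helper: compress one frame and send it as a single datagram
#include <vector>
#include <sys/socket.h>
#include "opencv2/imgcodecs.hpp"

int sendCompressed(int sockfd, const struct sockaddr *addr, socklen_t addrlen, const cv::Mat &frame)
{
    std::vector<uchar> jpeg;
    std::vector<int> params = { cv::IMWRITE_JPEG_QUALITY, 80 };
    if (!cv::imencode(".jpg", frame, jpeg, params))
        return -1;                               // encoding failed
    if (jpeg.size() > 65507)                     // still too big for one datagram
        return -1;
    return sendto(sockfd, jpeg.data(), jpeg.size(), 0, addr, addrlen) == -1 ? -1 : 0;
}

The receiver would pass the received bytes to cv::imdecode to get the frame back; unlike the raw-pixel approach, the image size and type travel inside the JPEG itself.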