
How to count the number of people inside/outside a shop?

Hello. How can I calculate how many people are currently in the shop? For example, 7 people came in and 5 people left, so 2 people remain in the shop.

Could you tell me how it can be implemented?

int checkIfBlobsCrossedTheLine(std::vector<Blob> &blobs, int &intVerticalLinePosition, int &carCountL, int &carCountR, std::ofstream &myfile) {

    int blnAtLeastOneBlobCrossedTheLine = 0;    // 0 = no crossing, 1 = crossed going left, 2 = crossed going right

    for (auto &blob : blobs) {

        if (blob.blnStillBeingTracked == true && blob.centerPositions.size() >= 2) {
            int prevFrameIndex = (int)blob.centerPositions.size() - 2;
            int currFrameIndex = (int)blob.centerPositions.size() - 1;

            // going left
            if (blob.centerPositions[prevFrameIndex].x > intVerticalLinePosition && blob.centerPositions[currFrameIndex].x <= intVerticalLinePosition) {
                carCountL++;

                time_t now = time(0);
                char* dt = strtok(ctime(&now), "\n");
                std::cout << dt << ",1,0 (Left)" << std::endl;
                myfile << dt << ",1,0" << std::endl;
                blnAtLeastOneBlobCrossedTheLine = 1;
            }

            // going right
            if (blob.centerPositions[prevFrameIndex].x < intVerticalLinePosition && blob.centerPositions[currFrameIndex].x >= intVerticalLinePosition) {
                carCountR++;

                time_t now = time(0);
                char* dt = strtok(ctime(&now), "\n");
                std::cout << dt << ",0,1 (Right)" << std::endl;
                myfile << dt << ",0,1" << std::endl;
                blnAtLeastOneBlobCrossedTheLine = 2;
            }
        }
    }

    return blnAtLeastOneBlobCrossedTheLine;
}
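
For illustration, here is a minimal sketch (not part of the code above) of how the two directional counters could be turned into the number of people currently inside. It assumes that crossings counted in carCountR are people entering the shop and crossings counted in carCountL are people leaving; swap the two if the camera is mounted the other way round.

// Hypothetical helper, not in the original code: derive the number of people
// currently inside the shop from the two directional crossing counters.
// Assumes right-crossings = entries (carCountR) and left-crossings = exits (carCountL).
int peopleCurrentlyInside(int countEntered, int countExited) {
    int inside = countEntered - countExited;
    return inside > 0 ? inside : 0;   // clamp at zero, since missed detections can push it negative
}

// Example from the question: 7 people entered and 5 left -> peopleCurrentlyInside(7, 5) == 2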


// main.cpp
// In brief, for every pair of consecutive frames:
// 1) convert the image to grayscale - cvtColor
// 2) apply a (Gaussian) blur to the image - GaussianBlur
// 3) take the absolute difference between the current and the following frame - absdiff
// 4) apply a threshold - threshold
// 5) dilate twice and erode once - dilate / erode
// 6) find the contours - findContours
// 7) build a convex hull around the contours from step 6 - convexHull; this gives white "blobs" - cars and other objects
// 8) with the custom function matchCurrentFrameBlobsToExistingBlobs, check whether a blob qualifies as a car according to certain parameters: the distance from the camera to the road and the approximate ratio of the car's size to the road
// 9) finally, with the custom function checkIfBlobsCrossedTheLine, check whether a blob crossed the red line (intHorizontalLinePosition):
//    blob.centerPositions.get(prevFrameIndex).y > intHorizontalLinePosition && blob.centerPositions.get(currFrameIndex).y <= intHorizontalLinePosition  - traffic moving away from us
//    blob.centerPositions.get(prevFrameIndex).y <= intHorizontalLinePosition && blob.centerPositions.get(currFrameIndex).y > intHorizontalLinePosition  - traffic moving towards us
//
// With different parameter settings in the functions from steps 8 and 9, cars can be counted from different camera positions. The horizontal line can even be restricted to a single lane, for example:
//    blob.centerPositions.get(prevFrameIndex).y > intHorizontalLinePosition && blob.centerPositions.get(currFrameIndex).y <= intHorizontalLinePosition &&
//    blob.centerPositions.get(currFrameIndex).x >= 360 &&
//    blob.centerPositions.get(currFrameIndex).x <= 620
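
For orientation, here are steps 1-5 above condensed into a single helper. This is only a restatement of what the main loop below already does (same 5x5 kernel, threshold value 30 and dilate/dilate/erode pattern); it is not a separate part of the original program, and it assumes the same OpenCV 2.x-style headers and constants (CV_BGR2GRAY, CV_THRESH_BINARY) as the listing that follows.

// Condensed restatement of steps 1-5 (not part of the original program):
// turn two consecutive colour frames into a binary motion mask.
cv::Mat motionMask(const cv::Mat &frame1, const cv::Mat &frame2) {
    cv::Mat gray1, gray2, diff, thresh;
    cv::cvtColor(frame1, gray1, CV_BGR2GRAY);                   // 1) grayscale
    cv::cvtColor(frame2, gray2, CV_BGR2GRAY);
    cv::GaussianBlur(gray1, gray1, cv::Size(5, 5), 0);          // 2) blur
    cv::GaussianBlur(gray2, gray2, cv::Size(5, 5), 0);
    cv::absdiff(gray1, gray2, diff);                            // 3) frame difference
    cv::threshold(diff, thresh, 30, 255.0, CV_THRESH_BINARY);   // 4) threshold
    cv::Mat kernel = cv::getStructuringElement(cv::MORPH_RECT, cv::Size(5, 5));
    for (int i = 0; i < 2; i++) {                               // 5) dilate twice, erode once
        cv::dilate(thresh, thresh, kernel);
        cv::dilate(thresh, thresh, kernel);
        cv::erode(thresh, thresh, kernel);
    }
    return thresh;
}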


#include <opencv2/core/core.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgproc/imgproc.hpp>

#include <iostream>
#include <conio.h>          // it may be necessary to change or remove this line if not using Windows

#include <fstream>          // file utils
#include <ctime>            // timestamp stuff

#include "Blob.h"

#define SHOW_STEPS          // un-comment or comment this line to show steps or not
#define FRAME_SCALE 1       // divide frame dimensions by this number

// global variables ///////////////////////////////////////////////////////////////////////////////
const cv::Scalar SCALAR_BLACK = cv::Scalar(0.0, 0.0, 0.0);
const cv::Scalar SCALAR_WHITE = cv::Scalar(255.0, 255.0, 255.0);
const cv::Scalar SCALAR_YELLOW = cv::Scalar(0.0, 255.0, 255.0);
const cv::Scalar SCALAR_GREEN = cv::Scalar(0.0, 200.0, 0.0);
const cv::Scalar SCALAR_RED = cv::Scalar(0.0, 0.0, 255.0);
const cv::Scalar SCALAR_BLUE = cv::Scalar(255.0, 0.0, 0.0);

// function prototypes ////////////////////////////////////////////////////////////////////////////
void matchCurrentFrameBlobsToExistingBlobs(std::vector<Blob> &existingBlobs, std::vector<Blob> &currentFrameBlobs);
void addBlobToExistingBlobs(Blob &currentFrameBlob, std::vector<Blob> &existingBlobs, int &intIndex);
void addNewBlob(Blob &currentFrameBlob, std::vector<Blob> &existingBlobs);
double distanceBetweenPoints(cv::Point point1, cv::Point point2);
void drawAndShowContours(cv::Size imageSize, std::vector<std::vector<cv::Point> > contours, std::string strImageName);
void drawAndShowContours(cv::Size imageSize, std::vector<Blob> blobs, std::string strImageName);
int checkIfBlobsCrossedTheLine(std::vector<Blob> &blobs, int &intVerticalLinePosition, int &carCountL, int &carCountR, std::ofstream &myfile);
void drawBlobInfoOnImage(std::vector<Blob> &blobs, cv::Mat &imgFrame2Copy);
void drawCarCountOnImage(int &carCountL, int &carCountR, cv::Mat &imgFrame2Copy);

///////////////////////////////////////////////////////////////////////////////////////////////////
int main(void) {

cv::VideoCapture capVideo;
std::ofstream myfile; // log file

cv::Mat imgFrame1;
cv::Mat imgFrame2;
cv::Mat imgFrame1L;
cv::Mat imgFrame2L;

std::vector<Blob> blobs;

cv::Point crossingLine[2];

int carCountL = 0;
int carCountR = 0;

capVideo.open("input1_2.MOV");
//capVideo.open("rtsp://192.168.1.254/sjcam.mov");
//capVideo.open(1);

// log file
myfile.open("/tmp/OpenCV-" + std::string() + "-" + std::to_string(time(0)) + ".txt");
std::cout << "Logging to: \"/tmp/OpenCV-" << "-" << std::to_string(time(0)) << ".txt\"" << std::endl;

myfile << "\"Timestamp\",\"Left\",\"Right\"" << std::endl;

if (!capVideo.isOpened()) {                                                 // if unable to open video file
    std::cout << "error reading video file" << std::endl << std::endl;      // show error message
    _getch();                   // it may be necessary to change or remove this line if not using Windows
    return(0);                                                              // and exit program
}

if (capVideo.get(CV_CAP_PROP_FRAME_COUNT) < 2) {
    std::cout << "error: video file must have at least two frames";
    _getch();                   // it may be necessary to change or remove this line if not using Windows
    return(0);
}

capVideo.read(imgFrame1L);
capVideo.read(imgFrame2L);

resize(imgFrame1L, imgFrame1, cv::Size(imgFrame1L.size().width / FRAME_SCALE, imgFrame1L.size().height / FRAME_SCALE));
resize(imgFrame2L, imgFrame2, cv::Size(imgFrame2L.size().width / FRAME_SCALE, imgFrame2L.size().height / FRAME_SCALE));


//int intHorizontalLinePosition = (int)std::round((double)imgFrame1.rows * 0.35);
int intVerticalLinePosition = (int)std::round((double)imgFrame1.cols * 0.50);

crossingLine[0].y = 0;
crossingLine[0].x = intVerticalLinePosition;

crossingLine[1].y = imgFrame1.rows - 1;
crossingLine[1].x = intVerticalLinePosition;

char chCheckForEscKey = 0;

bool blnFirstFrame = true;

int frameCount = 2;

while (capVideo.isOpened() && chCheckForEscKey != 27) {

    std::vector<Blob> currentFrameBlobs;

    cv::Mat imgFrame1Copy = imgFrame1.clone();
    cv::Mat imgFrame2Copy = imgFrame2.clone();

    cv::Mat imgDifference;
    cv::Mat imgThresh;

    cv::cvtColor(imgFrame1Copy, imgFrame1Copy, CV_BGR2GRAY);
    cv::cvtColor(imgFrame2Copy, imgFrame2Copy, CV_BGR2GRAY);

    cv::GaussianBlur(imgFrame1Copy, imgFrame1Copy, cv::Size(5, 5), 0);
    cv::GaussianBlur(imgFrame2Copy, imgFrame2Copy, cv::Size(5, 5), 0);

    cv::absdiff(imgFrame1Copy, imgFrame2Copy, imgDifference);

    cv::threshold(imgDifference, imgThresh, 30, 255.0, CV_THRESH_BINARY);

    //cv::imshow("imgThresh", imgThresh);

    cv::Mat structuringElement3x3 = cv::getStructuringElement(cv::MORPH_RECT, cv::Size(3, 3));
    cv::Mat structuringElement5x5 = cv::getStructuringElement(cv::MORPH_RECT, cv::Size(5, 5));
    cv::Mat structuringElement7x7 = cv::getStructuringElement(cv::MORPH_RECT, cv::Size(7, 7));
    cv::Mat structuringElement15x15 = cv::getStructuringElement(cv::MORPH_RECT, cv::Size(15, 15));

    for (unsigned int i = 0; i < 2; i++) {
        cv::dilate(imgThresh, imgThresh, structuringElement5x5);
        cv::dilate(imgThresh, imgThresh, structuringElement5x5);
        cv::erode(imgThresh, imgThresh, structuringElement5x5);
    }

    cv::Mat imgThreshCopy = imgThresh.clone();

    std::vector<std::vector<cv::Point> > contours;

    cv::findContours(imgThreshCopy, contours, cv::RETR_EXTERNAL, cv::CHAIN_APPROX_SIMPLE);

    drawAndShowContours(imgThresh.size(), contours, "imgContours");

    std::vector<std::vector<cv::Point> > convexHulls(contours.size());

    for (unsigned int i = 0; i < contours.size(); i++) {
        cv::convexHull(contours[i], convexHulls[i]);
    }

    drawAndShowContours(imgThresh.size(), convexHulls, "imgConvexHulls");

    for (auto &convexHull : convexHulls) {
        Blob possibleBlob(convexHull);

        if (possibleBlob.currentBoundingRect.area() > 800 &&
            possibleBlob.dblCurrentAspectRatio > 0.2 &&
            possibleBlob.dblCurrentAspectRatio < 4.0 &&
            possibleBlob.currentBoundingRect.width > 40 &&
            possibleBlob.currentBoundingRect.height > 40 &&
            possibleBlob.dblCurrentDiagonalSize > 90.0 &&
            (cv::contourArea(possibleBlob.currentContour) / (double)possibleBlob.currentBoundingRect.area()) > 0.50) {
            currentFrameBlobs.push_back(possibleBlob);
        }
    }

    drawAndShowContours(imgThresh.size(), currentFrameBlobs, "imgCurrentFrameBlobs");

    if (blnFirstFrame == true) {
        for (auto &currentFrameBlob : currentFrameBlobs) {
            blobs.push_back(currentFrameBlob);
        }
    }
    else {
        matchCurrentFrameBlobsToExistingBlobs(blobs, currentFrameBlobs);
    }

    drawAndShowContours(imgThresh.size(), blobs, "imgBlobs");

    imgFrame2Copy = imgFrame2.clone();          // get another copy of frame 2 since we changed the previous frame 2 copy in the processing above

    drawBlobInfoOnImage(blobs, imgFrame2Copy);

    int blnAtLeastOneBlobCrossedTheLine = checkIfBlobsCrossedTheLine(blobs, intVerticalLinePosition, carCountL, carCountR, myfile);   // 0 = none, 1 = left, 2 = right

    if (blnAtLeastOneBlobCrossedTheLine == 1) {
        cv::line(imgFrame2Copy, crossingLine[0], crossingLine[1], SCALAR_GREEN, 2);
    }
    else if (blnAtLeastOneBlobCrossedTheLine == 2) {
        cv::line(imgFrame2Copy, crossingLine[0], crossingLine[1], SCALAR_YELLOW, 2);
    }
    else {
        cv::line(imgFrame2Copy, crossingLine[0], crossingLine[1], SCALAR_BLUE, 2);
    }

    drawCarCountOnImage(carCountL, carCountR, imgFrame2Copy);

    cv::imshow("People_Counting_Cross_Line", imgFrame2Copy);

    //cv::waitKey(0);                 // uncomment this line to go frame by frame for debugging

    // now we prepare for the next iteration

    currentFrameBlobs.clear();

    imgFrame1 = imgFrame2.clone();           // move frame 1 up to where frame 2 is
    capVideo.read(imgFrame2);
    if ((capVideo.get(CV_CAP_PROP_POS_FRAMES) + 1) < capVideo.get(CV_CAP_PROP_FRAME_COUNT)) {
        capVideo.read(imgFrame2L);
        resize(imgFrame2L, imgFrame2, cv::Size(imgFrame2L.size().width / FRAME_SCALE, imgFrame2L.size().height / FRAME_SCALE));
    }
    else {
        time_t now = time(0);
        char* dt = strtok(ctime(&now), "\n");
        std::cout << dt << ",EOF" << std::endl;
        return(0); // end?
    }

    blnFirstFrame = false;
    frameCount++;
    chCheckForEscKey = cv::waitKey(1);
}

if (chCheckForEscKey != 27) {               // if the user did not press esc (i.e. we reached the end of the video)
    cv::waitKey(0);                         // hold the windows open to allow the "end of video" message to show
}
// note that if the user did press esc, we don't need to hold the windows open, we can simply let the program end which will close the windows

return(0);

}

///////////////////////////////////////////////////////////////////////////////////////////////////
void matchCurrentFrameBlobsToExistingBlobs(std::vector<Blob> &existingBlobs, std::vector<Blob> &currentFrameBlobs) {

for (auto &existingBlob : existingBlobs) {

    existingBlob.blnCurrentMatchFoundOrNewBlob = false;

    existingBlob.predictNextPosition();
}

for (auto &currentFrameBlob : currentFrameBlobs) {

    int intIndexOfLeastDistance = 0;
    double dblLeastDistance = 100000.;

    for (unsigned int i = 0; i < existingBlobs.size(); i++) {

        if (existingBlobs[i].blnStillBeingTracked == true) {

            double dblDistance = distanceBetweenPoints(currentFrameBlob.centerPositions.back(), existingBlobs[i].predictedNextPosition);

            if (dblDistance < dblLeastDistance) {
                dblLeastDistance = dblDistance;
                intIndexOfLeastDistance = i;
            }
        }
    }

    if (dblLeastDistance < currentFrameBlob.dblCurrentDiagonalSize * 0.5) {
        addBlobToExistingBlobs(currentFrameBlob, existingBlobs, intIndexOfLeastDistance);
    }
    else {
        addNewBlob(currentFrameBlob, existingBlobs);
    }

}

for (auto &existingBlob : existingBlobs) {

    if (existingBlob.blnCurrentMatchFoundOrNewBlob == false) {
        existingBlob.intNumOfConsecutiveFramesWithoutAMatch++;
    }

    if (existingBlob.intNumOfConsecutiveFramesWithoutAMatch >= 5) {
        existingBlob.blnStillBeingTracked = false;
    }

}

}

///////////////////////////////////////////////////////////////////////////////////////////////////
void addBlobToExistingBlobs(Blob &currentFrameBlob, std::vector<Blob> &existingBlobs, int &intIndex) {

existingBlobs[intIndex].currentContour = currentFrameBlob.currentContour;
existingBlobs[intIndex].currentBoundingRect = currentFrameBlob.currentBoundingRect;

existingBlobs[intIndex].centerPositions.push_back(currentFrameBlob.centerPositions.back());

existingBlobs[intIndex].dblCurrentDiagonalSize = currentFrameBlob.dblCurrentDiagonalSize;
existingBlobs[intIndex].dblCurrentAspectRatio = currentFrameBlob.dblCurrentAspectRatio;

existingBlobs[intIndex].blnStillBeingTracked = true;
existingBlobs[intIndex].blnCurrentMatchFoundOrNewBlob = true;

}

///////////////////////////////////////////////////////////////////////////////////////////////////
void addNewBlob(Blob &currentFrameBlob, std::vector<Blob> &existingBlobs) {

currentFrameBlob.blnCurrentMatchFoundOrNewBlob = true;

existingBlobs.push_back(currentFrameBlob);

}

///////////////////////////////////////////////////////////////////////////////////////////////////
double distanceBetweenPoints(cv::Point point1, cv::Point point2) {

int intX = abs(point1.x - point2.x);
int intY = abs(point1.y - point2.y);

return(sqrt(pow(intX, 2) + pow(intY, 2)));

}

///////////////////////////////////////////////////////////////////////////////////////////////////
void drawAndShowContours(cv::Size imageSize, std::vector<std::vector<cv::Point> > contours, std::string strImageName) {

cv::Mat image(imageSize, CV_8UC3, SCALAR_BLACK);

cv::drawContours(image, contours, -1, SCALAR_WHITE, -1);

//cv::imshow(strImageName, image);

}

///////////////////////////////////////////////////////////////////////////////////////////////////
void drawAndShowContours(cv::Size imageSize, std::vector<Blob> blobs, std::string strImageName) {

cv::Mat image(imageSize, CV_8UC3, SCALAR_BLACK);

std::vector<std::vector<cv::Point> > contours;

for (auto &blob : blobs) {
    if (blob.blnStillBeingTracked == true) {
        contours.push_back(blob.currentContour);
    }
}

cv::drawContours(image, contours, -1, SCALAR_WHITE, -1);

//cv::imshow(strImageName, image);

}

///////////////////////////////////////////////////////////////////////////////////////////////////
// returns 0 if no blob crossed the line, 1 if the last crossing was to the left, 2 if to the right
int checkIfBlobsCrossedTheLine(std::vector<Blob> &blobs, int &intVerticalLinePosition, int &carCountL, int &carCountR, std::ofstream &myfile) {

int blnAtLeastOneBlobCrossedTheLine = 0;

for (auto &blob : blobs) {

    if (blob.blnStillBeingTracked == true && blob.centerPositions.size() >= 2) {
        int prevFrameIndex = (int)blob.centerPositions.size() - 2;
        int currFrameIndex = (int)blob.centerPositions.size() - 1;

        //going left
        if (blob.centerPositions[prevFrameIndex].x > intVerticalLinePosition && blob.centerPositions[currFrameIndex].x <= intVerticalLinePosition) {
            carCountL++;

            time_t now = time(0);
            char* dt = strtok(ctime(&now), "\n");
            std::cout << dt << ",1,0 (Left)" << std::endl;
            myfile << dt << ",1,0" << std::endl;
            blnAtLeastOneBlobCrossedTheLine = 1;
        }

        // going right
        if (blob.centerPositions[prevFrameIndex].x < intVerticalLinePosition && blob.centerPositions[currFrameIndex].x >= intVerticalLinePosition) {
            carCountR++;

            time_t now = time(0);
            char* dt = strtok(ctime(&now), "\n");
            std::cout << dt << ",0,1 (Right)" << std::endl;
            myfile << dt << ",0,1" << std::endl;
            blnAtLeastOneBlobCrossedTheLine = 2;
        }

    }

}

return blnAtLeastOneBlobCrossedTheLine;

}

///////////////////////////////////////////////////////////////////////////////////////////////////
void drawBlobInfoOnImage(std::vector<Blob> &blobs, cv::Mat &imgFrame2Copy) {

for (unsigned int i = 0; i < blobs.size(); i++) {

    if (blobs[i].blnStillBeingTracked == true) {
        cv::rectangle(imgFrame2Copy, blobs[i].currentBoundingRect, SCALAR_RED, 2);

        int intFontFace = CV_FONT_HERSHEY_SIMPLEX;
        double dblFontScale = blobs[i].dblCurrentDiagonalSize / 60.0;
        int intFontThickness = (int)std::round(dblFontScale * 1.0);

        cv::putText(imgFrame2Copy, std::to_string(i), blobs[i].centerPositions.back(), intFontFace, dblFontScale, SCALAR_GREEN, intFontThickness);
    }
}

}

///////////////////////////////////////////////////////////////////////////////////////////////////
void drawCarCountOnImage(int &carCountL, int &carCountR, cv::Mat &imgFrame2Copy) {

int intFontFace = CV_FONT_HERSHEY_SIMPLEX;
double dblFontScale = (imgFrame2Copy.rows * imgFrame2Copy.cols) / 200000.0;
int intFontThickness = (int)std::round(dblFontScale * 1.5);

cv::Size textSizeL = cv::getTextSize("Inside: " + std::to_string(carCountL), intFontFace, dblFontScale, intFontThickness, 0);
cv::Size textSizeR = cv::getTextSize("Outside: " + std::to_string(carCountR), intFontFace, dblFontScale, intFontThickness, 0);

cv::Point ptTextBottomLeftPositionL, ptTextBottomLeftPositionR;

ptTextBottomLeftPositionL.x = imgFrame2Copy.cols - 1 - (int)((double)textSizeL.width * 1.25);
ptTextBottomLeftPositionL.y = (int)((double)textSizeL.height * 1.25);

ptTextBottomLeftPositionR.x = ptTextBottomLeftPositionL.x - 350;
ptTextBottomLeftPositionR.y = ptTextBottomLeftPositionL.y + (textSizeL.height) * 1.25;

cv::putText(imgFrame2Copy, "Inside: " + std::to_string(carCountL), ptTextBottomLeftPositionL, intFontFace, dblFontScale, SCALAR_GREEN, intFontThickness);
cv::putText(imgFrame2Copy, "Outside: " + std::to_string(carCountR), ptTextBottomLeftPositionR, intFontFace, dblFontScale, SCALAR_YELLOW, intFontThickness);

}
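
Finally, to show the answer to the original question directly on the video, a function like the following could be added next to drawCarCountOnImage. This is only a sketch, not part of the original program; it reuses the globals and font settings from the code above, and it assumes carCountR counts entries and carCountL counts exits (the text position near the top-left corner is an arbitrary choice).

// Hypothetical addition, not in the original code: draw the current number of
// people inside the shop, computed from the two directional counters.
void drawInsideCountOnImage(int &carCountL, int &carCountR, cv::Mat &imgFrame2Copy) {

    int intFontFace = CV_FONT_HERSHEY_SIMPLEX;
    double dblFontScale = (imgFrame2Copy.rows * imgFrame2Copy.cols) / 200000.0;
    int intFontThickness = (int)std::round(dblFontScale * 1.5);

    int peopleInside = carCountR - carCountL;    // entries minus exits
    if (peopleInside < 0) peopleInside = 0;      // guard against tracking errors

    cv::Point ptTextPosition(10, (int)((double)imgFrame2Copy.rows * 0.10));   // near the top-left corner

    cv::putText(imgFrame2Copy, "In shop: " + std::to_string(peopleInside), ptTextPosition, intFontFace, dblFontScale, SCALAR_RED, intFontThickness);
}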

How to calculate number of people counting inside/outside in shop?

Hello. How to calculate how many people are in the shop a lot ? Came out 7 people left 5 people in the shop left 2 people.

Could you tell me how it can be implemented?


// main.cpp //вкратце: //Для каждых двух идущих друг за другом кадров //1) делаем картинку серой - cvtColor //2) применяем на изображении размытие(по Гауссу) - GaussianBlur //3) ищем скалярную разницу между идущим и послеидущим кадром - absdiff //4) пороговое преобразование - threshold //5) растягиваем - dilate - 2 раза и сужаем - erode - один раз //6) находим контуры - findContours //7) проводим выпуклый контур по контурам из п. 6 - Convex Hull.Получаем "сгустки" (blob)белого цвета - это машины и иные объекты //8) с помощью самописной функции matchCurrentFrameBlobsToExistingBlobs проверяем, попадает ли наш сгусток под классификацию машины - по определённым параметрам : расстояние от камеры до дороги, примерное соотношение размера машины к дороге. //9) и, наконец, с помощью самописной функции checkIfBlobsCrossedTheLine - проверяем, пересек ли сгусток позицию красной линии(intHorizontalLinePosition) : //blob.centerPositions.get(prevFrameIndex).y > intHorizontalLinePosition && blob.centerPositions.get(currFrameIndex).y <= intHorizontalLinePosition - поток ОТ НАС // //blob.centerPositions.get(prevFrameIndex).y <= intHorizontalLinePosition && blob.centerPositions.get(currFrameIndex).y > intHorizontalLinePosition - поток НА НАС // //При различных настройках параметров в функциях в п.8 и п.9.можно подсчитывать количество машин с разной позиции камеры.Можно даже ограничивать горизонтальную линию лишь по одной полосе, например: //blob.centerPositions.get(prevFrameIndex).y > intHorizontalLinePosition && blob.centerPositions.get(currFrameIndex).y <= intHorizontalLinePosition && //blob.centerPositions.get(currFrameIndex).x >= 360 && //blob.centerPositions.get(currFrameIndex).x <= 620

enter code here

Heading

include<opencv2 core="" core.hpp="">

include<opencv2 highgui="" highgui.hpp="">

include<opencv2 imgproc="" imgproc.hpp="">

include<iostream>

include<conio.h> // it may be necessary to change or remove this line if not using Windows

include <fstream> // file utils

include <ctime> // timestamp stuff

include "Blob.h"

define SHOW_STEPS // un-comment or comment this line to show steps or not

define FRAME_SCALE 1 // divide frame dimentions by this number

// global variables /////////////////////////////////////////////////////////////////////////////// const cv::Scalar SCALAR_BLACK = cv::Scalar(0.0, 0.0, 0.0); const cv::Scalar SCALAR_WHITE = cv::Scalar(255.0, 255.0, 255.0); const cv::Scalar SCALAR_YELLOW = cv::Scalar(0.0, 255.0, 255.0); const cv::Scalar SCALAR_GREEN = cv::Scalar(0.0, 200.0, 0.0); const cv::Scalar SCALAR_RED = cv::Scalar(0.0, 0.0, 255.0); const cv::Scalar SCALAR_BLUE = cv::Scalar(255.0, 0.0, 0.0);

0.0); // function prototypes //////////////////////////////////////////////////////////////////////////// void matchCurrentFrameBlobsToExistingBlobs(std::vector<blob> &existingBlobs, std::vector<blob> &currentFrameBlobs); void addBlobToExistingBlobs(Blob &currentFrameBlob, std::vector<blob> &existingBlobs, int &intIndex); void addNewBlob(Blob &currentFrameBlob, std::vector<blob> &existingBlobs); double distanceBetweenPoints(cv::Point point1, cv::Point point2); void drawAndShowContours(cv::Size imageSize, std::vector<std::vector<cv::point> > contours, std::string strImageName); void drawAndShowContours(cv::Size imageSize, std::vector<blob> blobs, std::string strImageName); bool checkIfBlobsCrossedTheLine(std::vector<blob> &blobs, int &intVerticalLinePosition, int &carCountL, int &carCountR, std::ofstream &myfile); void drawBlobInfoOnImage(std::vector<blob> &blobs, cv::Mat &imgFrame2Copy); void drawCarCountOnImage(int &carCountL, int &carCountR, cv::Mat &imgFrame2Copy);

&imgFrame2Copy); /////////////////////////////////////////////////////////////////////////////////////////////////// int main(void) {

{
    cv::VideoCapture capVideo;
 std::ofstream myfile; // log file
  cv::Mat imgFrame1;
 cv::Mat imgFrame2;
 cv::Mat imgFrame1L;
 cv::Mat imgFrame2L;

std::vector<Blob>     std::vector<blob> blobs;
  cv::Point crossingLine[2];
  int carCountL = 0;
 int carCountR = 0;
  capVideo.open("input1_2.MOV");
 //capVideo.open("rtsp://192.168.1.254/sjcam.mov");
//capVideo.open(1);

    //capVideo.open(1);

// log file
myfile.open("/tmp/OpenCV-" + std::string() + "-" + std::to_string(time(0)) + ".txt");
std::cout << "Logging to: \"/tmp/OpenCV-" << "-" << std::to_string(time(0)) << ".txt\"" << std::endl;
 myfile << "\"Timestamp\",\"Left\",\"Right\"" << std::endl;
 if (!capVideo.isOpened()) {                                                 // if unable to open video file
    std::cout << "error reading video file" << std::endl << std::endl;      // show error message
    _getch();                   // it may be necessary to change or remove this line if not using Windows
    return(0);                                                              // and exit program
}
 if (capVideo.get(CV_CAP_PROP_FRAME_COUNT) < 2) {
    std::cout << "error: video file must have at least two frames";
    _getch();                   // it may be necessary to change or remove this line if not using Windows
    return(0);
}
 capVideo.read(imgFrame1L);
capVideo.read(imgFrame2L);
 resize(imgFrame1L, imgFrame1, cv::Size(imgFrame1L.size().width / FRAME_SCALE, imgFrame1L.size().height / FRAME_SCALE));
resize(imgFrame2L, imgFrame2, cv::Size(imgFrame2L.size().width / FRAME_SCALE, imgFrame2L.size().height / FRAME_SCALE));
 //int intHorizontalLinePosition = (int)std::round((double)imgFrame1.rows * 0.35);
int intVerticalLinePosition = (int)std::round((double)imgFrame1.cols * 0.50);
 crossingLine[0].y = 0;
crossingLine[0].x = intVerticalLinePosition;
 crossingLine[1].y = imgFrame1.rows - 1;
crossingLine[1].x = intVerticalLinePosition;
 char chCheckForEscKey = 0;
 bool blnFirstFrame = true;
 int frameCount = 2;
 while (capVideo.isOpened() && chCheckForEscKey != 27) {
     std::vector<Blob> currentFrameBlobs;
     cv::Mat imgFrame1Copy = imgFrame1.clone();
    cv::Mat imgFrame2Copy = imgFrame2.clone();
     cv::Mat imgDifference;
    cv::Mat imgThresh;
     cv::cvtColor(imgFrame1Copy, imgFrame1Copy, CV_BGR2GRAY);
    cv::cvtColor(imgFrame2Copy, imgFrame2Copy, CV_BGR2GRAY);
     cv::GaussianBlur(imgFrame1Copy, imgFrame1Copy, cv::Size(5, 5), 0);
    cv::GaussianBlur(imgFrame2Copy, imgFrame2Copy, cv::Size(5, 5), 0);
     cv::absdiff(imgFrame1Copy, imgFrame2Copy, imgDifference);
     cv::threshold(imgDifference, imgThresh, 30, 255.0, CV_THRESH_BINARY);
     //cv::imshow("imgThresh", imgThresh);
     cv::Mat structuringElement3x3 = cv::getStructuringElement(cv::MORPH_RECT, cv::Size(3, 3));
    cv::Mat structuringElement5x5 = cv::getStructuringElement(cv::MORPH_RECT, cv::Size(5, 5));
    cv::Mat structuringElement7x7 = cv::getStructuringElement(cv::MORPH_RECT, cv::Size(7, 7));
    cv::Mat structuringElement15x15 = cv::getStructuringElement(cv::MORPH_RECT, cv::Size(15, 15));
     for (unsigned int i = 0; i < 2; i++) {
        cv::dilate(imgThresh, imgThresh, structuringElement5x5);
        cv::dilate(imgThresh, imgThresh, structuringElement5x5);
        cv::erode(imgThresh, imgThresh, structuringElement5x5);
    }
     cv::Mat imgThreshCopy = imgThresh.clone();
     std::vector<std::vector<cv::Point> > contours;
     cv::findContours(imgThreshCopy, contours, cv::RETR_EXTERNAL, cv::CHAIN_APPROX_SIMPLE);
     drawAndShowContours(imgThresh.size(), contours, "imgContours");
     std::vector<std::vector<cv::Point> > convexHulls(contours.size());
     for (unsigned int i = 0; i < contours.size(); i++) {
        cv::convexHull(contours[i], convexHulls[i]);
    }
     drawAndShowContours(imgThresh.size(), convexHulls, "imgConvexHulls");
     for (auto &convexHull : convexHulls) {
        Blob possibleBlob(convexHull);
         if (possibleBlob.currentBoundingRect.area() > 800 &&
            possibleBlob.dblCurrentAspectRatio > 0.2 &&
            possibleBlob.dblCurrentAspectRatio < 4.0 &&
            possibleBlob.currentBoundingRect.width > 40 &&
            possibleBlob.currentBoundingRect.height > 40 &&
            possibleBlob.dblCurrentDiagonalSize > 90.0 &&
            (cv::contourArea(possibleBlob.currentContour) / (double)possibleBlob.currentBoundingRect.area()) > 0.50) {
            currentFrameBlobs.push_back(possibleBlob);
        }
    }
     drawAndShowContours(imgThresh.size(), currentFrameBlobs, "imgCurrentFrameBlobs");
     if (blnFirstFrame == true) {
        for (auto &currentFrameBlob : currentFrameBlobs) {
            blobs.push_back(currentFrameBlob);
        }
    }
    else {
        matchCurrentFrameBlobsToExistingBlobs(blobs, currentFrameBlobs);
    }
     drawAndShowContours(imgThresh.size(), blobs, "imgBlobs");
     imgFrame2Copy = imgFrame2.clone();          // get another copy of frame 2 since we changed the previous frame 2 copy in the processing above
     drawBlobInfoOnImage(blobs, imgFrame2Copy);
     int blnAtLeastOneBlobCrossedTheLine = checkIfBlobsCrossedTheLine(blobs, intVerticalLinePosition, carCountL, carCountR, myfile);
     if (blnAtLeastOneBlobCrossedTheLine == 1) {
        cv::line(imgFrame2Copy, crossingLine[0], crossingLine[1], SCALAR_GREEN, 2);
    }
    else if (blnAtLeastOneBlobCrossedTheLine == 2) {
        cv::line(imgFrame2Copy, crossingLine[0], crossingLine[1], SCALAR_YELLOW, 2);
    }
    else {
        cv::line(imgFrame2Copy, crossingLine[0], crossingLine[1], SCALAR_BLUE, 2);
    }
     drawCarCountOnImage(carCountL, carCountR, imgFrame2Copy);
     cv::imshow("People_Counting_Cross_Line", imgFrame2Copy);
     //cv::waitKey(0);                 // uncomment this line to go frame by frame for debugging
     // now we prepare for the next iteration
     currentFrameBlobs.clear();
     imgFrame1 = imgFrame2.clone();           // move frame 1 up to where frame 2 is
    capVideo.read(imgFrame2);
    if ((capVideo.get(CV_CAP_PROP_POS_FRAMES) + 1) < capVideo.get(CV_CAP_PROP_FRAME_COUNT)) {
        capVideo.read(imgFrame2L);
        resize(imgFrame2L, imgFrame2, cv::Size(imgFrame2L.size().width / FRAME_SCALE, imgFrame2L.size().height / FRAME_SCALE));
    }
    else {
        time_t now = time(0);
        char* dt = strtok(ctime(&now), "\n");;
        std::cout << dt << ",EOF" << std::endl;
        return(0); // end?
    }
     blnFirstFrame = false;
    frameCount++;
    chCheckForEscKey = cv::waitKey(1);
}
 if (chCheckForEscKey != 27) {               // if the user did not press esc (i.e. we reached the end of the video)
    cv::waitKey(0);                         // hold the windows open to allow the "end of video" message to show
}
// note that if the user did press esc, we don't need to hold the windows open, we can simply let the program end which will close the windows
 return(0);

}

} /////////////////////////////////////////////////////////////////////////////////////////////////// void matchCurrentFrameBlobsToExistingBlobs(std::vector<blob> &existingBlobs, std::vector<blob> &currentFrameBlobs) {

{
    for (auto &existingBlob : existingBlobs) {
      existingBlob.blnCurrentMatchFoundOrNewBlob = false;
      existingBlob.predictNextPosition();
}

    }
    for (auto &currentFrameBlob : currentFrameBlobs) {
      int intIndexOfLeastDistance = 0;
     double dblLeastDistance = 100000.;
      for (unsigned int i = 0; i < existingBlobs.size(); i++) {
          if (existingBlobs[i].blnStillBeingTracked == true) {
              double dblDistance = distanceBetweenPoints(currentFrameBlob.centerPositions.back(), existingBlobs[i].predictedNextPosition);
              if (dblDistance < dblLeastDistance) {
                 dblLeastDistance = dblDistance;
                 intIndexOfLeastDistance = i;
            }
        }
    }

    }
            }
        }
        if (dblLeastDistance < currentFrameBlob.dblCurrentDiagonalSize * 0.5) {
         addBlobToExistingBlobs(currentFrameBlob, existingBlobs, intIndexOfLeastDistance);
    }
    }
        else {
         addNewBlob(currentFrameBlob, existingBlobs);
    }

}

    }
    }
    for (auto &existingBlob : existingBlobs) {
      if (existingBlob.blnCurrentMatchFoundOrNewBlob == false) {
         existingBlob.intNumOfConsecutiveFramesWithoutAMatch++;
    }

    }
        if (existingBlob.intNumOfConsecutiveFramesWithoutAMatch >= 5) {
         existingBlob.blnStillBeingTracked = false;
    }

}

}

} } } /////////////////////////////////////////////////////////////////////////////////////////////////// void addBlobToExistingBlobs(Blob &currentFrameBlob, std::vector<blob> &existingBlobs, int &intIndex) {

{
    existingBlobs[intIndex].currentContour = currentFrameBlob.currentContour;
 existingBlobs[intIndex].currentBoundingRect = currentFrameBlob.currentBoundingRect;
  existingBlobs[intIndex].centerPositions.push_back(currentFrameBlob.centerPositions.back());
  existingBlobs[intIndex].dblCurrentDiagonalSize = currentFrameBlob.dblCurrentDiagonalSize;
 existingBlobs[intIndex].dblCurrentAspectRatio = currentFrameBlob.dblCurrentAspectRatio;
  existingBlobs[intIndex].blnStillBeingTracked = true;
 existingBlobs[intIndex].blnCurrentMatchFoundOrNewBlob = true;

}

} /////////////////////////////////////////////////////////////////////////////////////////////////// void addNewBlob(Blob &currentFrameBlob, std::vector<blob> &existingBlobs) {

{
    currentFrameBlob.blnCurrentMatchFoundOrNewBlob = true;
  existingBlobs.push_back(currentFrameBlob);

}

} /////////////////////////////////////////////////////////////////////////////////////////////////// double distanceBetweenPoints(cv::Point point1, cv::Point point2) {

{
    int intX = abs(point1.x - point2.x);
 int intY = abs(point1.y - point2.y);
  return(sqrt(pow(intX, 2) + pow(intY, 2)));

}

} /////////////////////////////////////////////////////////////////////////////////////////////////// void drawAndShowContours(cv::Size imageSize, std::vector<std::vector<cv::point> > contours, std::string strImageName) { cv::Mat image(imageSize, CV_8UC3, SCALAR_BLACK);

SCALAR_BLACK);
    cv::drawContours(image, contours, -1, SCALAR_WHITE, -1);
  //cv::imshow(strImageName, image);

}

} /////////////////////////////////////////////////////////////////////////////////////////////////// void drawAndShowContours(cv::Size imageSize, std::vector<blob> blobs, std::string strImageName) {

{
    cv::Mat image(imageSize, CV_8UC3, SCALAR_BLACK);

std::vector<std::vector<cv::Point>     std::vector<std::vector<cv::point> > contours;
  for (auto &blob : blobs) {
     if (blob.blnStillBeingTracked == true) {
         contours.push_back(blob.currentContour);
    }
}

    }
    }
    cv::drawContours(image, contours, -1, SCALAR_WHITE, -1);
  //cv::imshow(strImageName, image);

}

} /////////////////////////////////////////////////////////////////////////////////////////////////// bool checkIfBlobsCrossedTheLine(std::vector<blob> &blobs, int &intVerticalLinePosition, int &carCountL, int &carCountR, std::ofstream &myfile) { bool blnAtLeastOneBlobCrossedTheLine = 0;

0;
    for (auto blob : blobs) {
      if (blob.blnStillBeingTracked == true && blob.centerPositions.size() >= 2) {
         int prevFrameIndex = (int)blob.centerPositions.size() - 2;
         int currFrameIndex = (int)blob.centerPositions.size() - 1;
          //going left
         if (blob.centerPositions[prevFrameIndex].x > intVerticalLinePosition && blob.centerPositions[currFrameIndex].x <= intVerticalLinePosition) {
            carCountL++;

    carCountL++;

            time_t now = time(0);
            char* dt = strtok(ctime(&now), "\n");;
            std::cout << dt << ",1,0 (Left)" << std::endl;
            myfile << dt << ",1,0" << std::endl;
            blnAtLeastOneBlobCrossedTheLine = 1;
        }

        // going right
        if (blob.centerPositions[prevFrameIndex].x < intVerticalLinePosition && blob.centerPositions[currFrameIndex].x >= intVerticalLinePosition) {
            carCountR++;

            time_t now = time(0);
            char* dt = strtok(ctime(&now), "\n");;
            std::cout << dt << ",0,1 (Right)" << std::endl;
            myfile << dt << ",0,1" << std::endl;
            blnAtLeastOneBlobCrossedTheLine = 2;
        }

    }

}

}
return blnAtLeastOneBlobCrossedTheLine;

}

} /////////////////////////////////////////////////////////////////////////////////////////////////// void drawBlobInfoOnImage(std::vector<blob> &blobs, cv::Mat &imgFrame2Copy) {

{
    for (unsigned int i = 0; i < blobs.size(); i++) {
      if (blobs[i].blnStillBeingTracked == true) {
         cv::rectangle(imgFrame2Copy, blobs[i].currentBoundingRect, SCALAR_RED, 2);
          int intFontFace = CV_FONT_HERSHEY_SIMPLEX;
         double dblFontScale = blobs[i].dblCurrentDiagonalSize / 60.0;
         int intFontThickness = (int)std::round(dblFontScale * 1.0);
          cv::putText(imgFrame2Copy, std::to_string(i), blobs[i].centerPositions.back(), intFontFace, dblFontScale, SCALAR_GREEN, intFontThickness);
    }
}

}

} } } /////////////////////////////////////////////////////////////////////////////////////////////////// void drawCarCountOnImage(int &carCountL, int &carCountR, cv::Mat &imgFrame2Copy) {

{
    int intFontFace = CV_FONT_HERSHEY_SIMPLEX;
 double dblFontScale = (imgFrame2Copy.rows * imgFrame2Copy.cols) / 200000.0;
 int intFontThickness = (int)std::round(dblFontScale * 1.5);
  cv::Size textSizeL = cv::getTextSize("Inside: " + std::to_string(carCountL), intFontFace, dblFontScale, intFontThickness, 0);
 cv::Size textSizeR = cv::getTextSize("Outside: " + std::to_string(carCountR), intFontFace, dblFontScale, intFontThickness, 0);
  cv::Point ptTextBottomLeftPositionL, ptTextBottomLeftPositionR;
  ptTextBottomLeftPositionL.x = imgFrame2Copy.cols - 1 - (int)((double)textSizeL.width * 1.25);
 ptTextBottomLeftPositionL.y = (int)((double)textSizeL.height * 1.25);
  ptTextBottomLeftPositionR.x = ptTextBottomLeftPositionL.x - 350;
 ptTextBottomLeftPositionR.y = ptTextBottomLeftPositionL.y + (textSizeL.height) * 1.25;
  cv::putText(imgFrame2Copy, "Inside: " + std::to_string(carCountL), ptTextBottomLeftPositionL, intFontFace, dblFontScale, SCALAR_GREEN, intFontThickness);
 cv::putText(imgFrame2Copy, "Outside: " + std::to_string(carCountR), ptTextBottomLeftPositionR, intFontFace, dblFontScale, SCALAR_YELLOW, SCALAR_YELLOW,* intFontThickness);

}

Heading

How to calculate number of people counting inside/outside in shop?

Hello. How to calculate how many people are in the shop a lot ? Came out 7 people left 5 people in the shop left 2 people.

Could you tell me how it can be implemented?

Heading

include<opencv2 core="" core.hpp="">

include<opencv2 highgui="" highgui.hpp="">

include<opencv2 imgproc="" imgproc.hpp="">

include<iostream>

include<conio.h> implemented? // it may be necessary to change or remove this line if not using Windows

include <fstream> // file utils

include <ctime> // timestamp stuff

include "Blob.h"

define SHOW_STEPS // un-comment or comment this line to show steps or not

define FRAME_SCALE 1 // divide frame dimentions by this number

// global variables /////////////////////////////////////////////////////////////////////////////// const cv::Scalar SCALAR_BLACK = cv::Scalar(0.0, 0.0, 0.0); const cv::Scalar SCALAR_WHITE = cv::Scalar(255.0, 255.0, 255.0); const cv::Scalar SCALAR_YELLOW = cv::Scalar(0.0, 255.0, 255.0); const cv::Scalar SCALAR_GREEN = cv::Scalar(0.0, 200.0, 0.0); const cv::Scalar SCALAR_RED = cv::Scalar(0.0, 0.0, 255.0); const cv::Scalar SCALAR_BLUE = cv::Scalar(255.0, 0.0, 0.0); // function prototypes //////////////////////////////////////////////////////////////////////////// void matchCurrentFrameBlobsToExistingBlobs(std::vector<blob> &existingBlobs, std::vector<blob> &currentFrameBlobs); void addBlobToExistingBlobs(Blob &currentFrameBlob, std::vector<blob> &existingBlobs, int &intIndex); void addNewBlob(Blob &currentFrameBlob, std::vector<blob> &existingBlobs); double distanceBetweenPoints(cv::Point point1, cv::Point point2); void drawAndShowContours(cv::Size imageSize, std::vector<std::vector<cv::point> main.cpp //вкратце: //Для каждых двух идущих друг за другом кадров //1) делаем картинку серой - cvtColor //2) применяем на изображении размытие(по Гауссу) - GaussianBlur //3) ищем скалярную разницу между идущим и послеидущим кадром - absdiff //4) пороговое преобразование - threshold //5) растягиваем - dilate - 2 раза и сужаем - erode - один раз //6) находим контуры - findContours //7) проводим выпуклый контур по контурам из п. 6 - Convex Hull.Получаем "сгустки" (blob)белого цвета - это машины и иные объекты //8) с помощью самописной функции matchCurrentFrameBlobsToExistingBlobs проверяем, попадает ли наш сгусток под классификацию машины - по определённым параметрам : расстояние от камеры до дороги, примерное соотношение размера машины к дороге. //9) и, наконец, с помощью самописной функции checkIfBlobsCrossedTheLine - проверяем, пересек ли сгусток позицию красной линии(intHorizontalLinePosition) : //blob.centerPositions.get(prevFrameIndex).y > contours, std::string strImageName); void drawAndShowContours(cv::Size imageSize, std::vector<blob> blobs, std::string strImageName); bool checkIfBlobsCrossedTheLine(std::vector<blob> &blobs, int &intVerticalLinePosition, int &carCountL, int &carCountR, std::ofstream &myfile); void drawBlobInfoOnImage(std::vector<blob> &blobs, cv::Mat &imgFrame2Copy); void drawCarCountOnImage(int &carCountL, int &carCountR, cv::Mat &imgFrame2Copy); /////////////////////////////////////////////////////////////////////////////////////////////////// int main(void) { cv::VideoCapture capVideo; std::ofstream myfile; // log file cv::Mat imgFrame1; cv::Mat imgFrame2; cv::Mat imgFrame1L; cv::Mat imgFrame2L; std::vector<blob> blobs; cv::Point crossingLine[2]; int carCountL = 0; int carCountR = 0; capVideo.open("input1_2.MOV"); //capVideo.open("rtsp://192.168.1.254/sjcam.mov"); //capVideo.open(1);intHorizontalLinePosition && blob.centerPositions.get(currFrameIndex).y <= intHorizontalLinePosition - поток ОТ НАС // //blob.centerPositions.get(prevFrameIndex).y <= intHorizontalLinePosition && blob.centerPositions.get(currFrameIndex).y > intHorizontalLinePosition - поток НА НАС // //При различных настройках параметров в функциях в п.8 и п.9.можно подсчитывать количество машин с разной позиции камеры.Можно даже ограничивать горизонтальную линию лишь по одной полосе, например: //blob.centerPositions.get(prevFrameIndex).y > intHorizontalLinePosition && blob.centerPositions.get(currFrameIndex).y <= intHorizontalLinePosition && //blob.centerPositions.get(currFrameIndex).x >= 360 && 
//blob.centerPositions.get(currFrameIndex).x <= 620

// log file
myfile.open("/tmp/OpenCV-" + std::string() + "-" + std::to_string(time(0)) + ".txt");
std::cout << "Logging to: \"/tmp/OpenCV-" << "-" << std::to_string(time(0)) << ".txt\"" << std::endl;
myfile << "\"Timestamp\",\"Left\",\"Right\"" << std::endl;
if (!capVideo.isOpened()) {                                                 // if unable to open video file
    std::cout << "error reading video file" << std::endl << std::endl;      // show error message
    _getch();         #include<opencv2/core/core.hpp>
#include<opencv2/highgui/highgui.hpp>
#include<opencv2/imgproc/imgproc.hpp>

#include<iostream>
#include<conio.h>           // it may be necessary to change or remove this line if not using Windows
    return(0);                                                         #include <fstream>      // and exit program
}
if (capVideo.get(CV_CAP_PROP_FRAME_COUNT) < 2) {
file utils
#include <ctime>        // timestamp stuff
#include "Blob.h"

#define SHOW_STEPS            // un-comment or comment this line to show steps or not
#define FRAME_SCALE     1   // divide frame dimentions by this number

// global variables ///////////////////////////////////////////////////////////////////////////////
const cv::Scalar SCALAR_BLACK = cv::Scalar(0.0, 0.0, 0.0);
const cv::Scalar SCALAR_WHITE = cv::Scalar(255.0, 255.0, 255.0);
const cv::Scalar SCALAR_YELLOW = cv::Scalar(0.0, 255.0, 255.0);
const cv::Scalar SCALAR_GREEN = cv::Scalar(0.0, 200.0, 0.0);
const cv::Scalar SCALAR_RED = cv::Scalar(0.0, 0.0, 255.0);
const cv::Scalar SCALAR_BLUE = cv::Scalar(255.0, 0.0, 0.0);

// function prototypes ////////////////////////////////////////////////////////////////////////////
void matchCurrentFrameBlobsToExistingBlobs(std::vector<Blob> &existingBlobs, std::vector<Blob> &currentFrameBlobs);
void addBlobToExistingBlobs(Blob &currentFrameBlob, std::vector<Blob> &existingBlobs, int &intIndex);
void addNewBlob(Blob &currentFrameBlob, std::vector<Blob> &existingBlobs);
double distanceBetweenPoints(cv::Point point1, cv::Point point2);
void drawAndShowContours(cv::Size imageSize, std::vector<std::vector<cv::Point> > contours, std::string strImageName);
void drawAndShowContours(cv::Size imageSize, std::vector<Blob> blobs, std::string strImageName);
bool checkIfBlobsCrossedTheLine(std::vector<Blob> &blobs, int &intVerticalLinePosition, int &carCountL, int &carCountR, std::ofstream &myfile);
void drawBlobInfoOnImage(std::vector<Blob> &blobs, cv::Mat &imgFrame2Copy);
void drawCarCountOnImage(int &carCountL, int &carCountR, cv::Mat &imgFrame2Copy);

///////////////////////////////////////////////////////////////////////////////////////////////////
int main(void) {

    cv::VideoCapture capVideo;
    std::ofstream myfile; // log file

    cv::Mat imgFrame1;
    cv::Mat imgFrame2;
    cv::Mat imgFrame1L;
    cv::Mat imgFrame2L;

    std::vector<Blob> blobs;

    cv::Point crossingLine[2];

    int carCountL = 0;
    int carCountR = 0;

    capVideo.open("input1_2.MOV");
    //capVideo.open("rtsp://192.168.1.254/sjcam.mov");
    //capVideo.open(1);

    // log file
    myfile.open("/tmp/OpenCV-" + std::string() + "-" + std::to_string(time(0)) + ".txt");
    std::cout << "error: "Logging to: \"/tmp/OpenCV-" << "-" << std::to_string(time(0)) << ".txt\"" << std::endl;

    myfile << "\"Timestamp\",\"Left\",\"Right\"" << std::endl;

    if (!capVideo.isOpened()) {                                                 // if unable to open video file must have at least two frames";
file
        std::cout << "error reading video file" << std::endl << std::endl;      // show error message
        _getch();                   // it may be necessary to change or remove this line if not using Windows
        return(0);                                                              // and exit program
    }

    if (capVideo.get(CV_CAP_PROP_FRAME_COUNT) < 2) {
        std::cout << "error: video file must have at least two frames";
        _getch();                   // it may be necessary to change or remove this line if not using Windows
        return(0);
}
    }

    capVideo.read(imgFrame1L);
 capVideo.read(imgFrame2L);
 resize(imgFrame1L, imgFrame1, cv::Size(imgFrame1L.size().width / FRAME_SCALE, imgFrame1L.size().height / FRAME_SCALE));
 resize(imgFrame2L, imgFrame2, cv::Size(imgFrame2L.size().width / FRAME_SCALE, imgFrame2L.size().height / FRAME_SCALE));
 //int intHorizontalLinePosition = (int)std::round((double)imgFrame1.rows * 0.35);
 int intVerticalLinePosition = (int)std::round((double)imgFrame1.cols * 0.50);
 crossingLine[0].y = 0;
 crossingLine[0].x = intVerticalLinePosition;
 crossingLine[1].y = imgFrame1.rows - 1;
 crossingLine[1].x = intVerticalLinePosition;
 char chCheckForEscKey = 0;
 bool blnFirstFrame = true;
 int frameCount = 2;
 while (capVideo.isOpened() && chCheckForEscKey != 27) {
     std::vector<Blob> currentFrameBlobs;
     cv::Mat imgFrame1Copy = imgFrame1.clone();
     cv::Mat imgFrame2Copy = imgFrame2.clone();
     cv::Mat imgDifference;
     cv::Mat imgThresh;
     cv::cvtColor(imgFrame1Copy, imgFrame1Copy, CV_BGR2GRAY);
     cv::cvtColor(imgFrame2Copy, imgFrame2Copy, CV_BGR2GRAY);
     cv::GaussianBlur(imgFrame1Copy, imgFrame1Copy, cv::Size(5, 5), 0);
     cv::GaussianBlur(imgFrame2Copy, imgFrame2Copy, cv::Size(5, 5), 0);
     cv::absdiff(imgFrame1Copy, imgFrame2Copy, imgDifference);
     cv::threshold(imgDifference, imgThresh, 30, 255.0, CV_THRESH_BINARY);
     //cv::imshow("imgThresh", imgThresh);
     cv::Mat structuringElement3x3 = cv::getStructuringElement(cv::MORPH_RECT, cv::Size(3, 3));
     cv::Mat structuringElement5x5 = cv::getStructuringElement(cv::MORPH_RECT, cv::Size(5, 5));
     cv::Mat structuringElement7x7 = cv::getStructuringElement(cv::MORPH_RECT, cv::Size(7, 7));
     cv::Mat structuringElement15x15 = cv::getStructuringElement(cv::MORPH_RECT, cv::Size(15, 15));
     for (unsigned int i = 0; i < 2; i++) {
         cv::dilate(imgThresh, imgThresh, structuringElement5x5);
         cv::dilate(imgThresh, imgThresh, structuringElement5x5);
         cv::erode(imgThresh, imgThresh, structuringElement5x5);
    }
    }

        cv::Mat imgThreshCopy = imgThresh.clone();
     std::vector<std::vector<cv::Point> > contours;
     cv::findContours(imgThreshCopy, contours, cv::RETR_EXTERNAL, cv::CHAIN_APPROX_SIMPLE);
     drawAndShowContours(imgThresh.size(), contours, "imgContours");
     std::vector<std::vector<cv::Point> > convexHulls(contours.size());
     for (unsigned int i = 0; i < contours.size(); i++) {
         cv::convexHull(contours[i], convexHulls[i]);
    }
    }

        drawAndShowContours(imgThresh.size(), convexHulls, "imgConvexHulls");
     for (auto &convexHull : convexHulls) {
         Blob possibleBlob(convexHull);
         if (possibleBlob.currentBoundingRect.area() > 800 &&
             possibleBlob.dblCurrentAspectRatio > 0.2 &&
             possibleBlob.dblCurrentAspectRatio < 4.0 &&
             possibleBlob.currentBoundingRect.width > 40 &&
             possibleBlob.currentBoundingRect.height > 40 &&
             possibleBlob.dblCurrentDiagonalSize > 90.0 &&
             (cv::contourArea(possibleBlob.currentContour) / (double)possibleBlob.currentBoundingRect.area()) > 0.50) {
             currentFrameBlobs.push_back(possibleBlob);
        }
    }
    }
        }

        drawAndShowContours(imgThresh.size(), currentFrameBlobs, "imgCurrentFrameBlobs");
     if (blnFirstFrame == true) {
         for (auto &currentFrameBlob : currentFrameBlobs) {
             blobs.push_back(currentFrameBlob);
        }
    }
    }
        }
        else {
         matchCurrentFrameBlobsToExistingBlobs(blobs, currentFrameBlobs);
    }
    }

        drawAndShowContours(imgThresh.size(), blobs, "imgBlobs");
     imgFrame2Copy = imgFrame2.clone();          // get another copy of frame 2 since we changed the previous frame 2 copy in the processing above
     drawBlobInfoOnImage(blobs, imgFrame2Copy);
     int blnAtLeastOneBlobCrossedTheLine = checkIfBlobsCrossedTheLine(blobs, intVerticalLinePosition, carCountL, carCountR, myfile);
     if (blnAtLeastOneBlobCrossedTheLine == 1) {
         cv::line(imgFrame2Copy, crossingLine[0], crossingLine[1], SCALAR_GREEN, 2);
    }
    }
        else if (blnAtLeastOneBlobCrossedTheLine == 2) {
         cv::line(imgFrame2Copy, crossingLine[0], crossingLine[1], SCALAR_YELLOW, 2);
    }
    }
        else {
         cv::line(imgFrame2Copy, crossingLine[0], crossingLine[1], SCALAR_BLUE, 2);
    }
    }

        drawCarCountOnImage(carCountL, carCountR, imgFrame2Copy);
     cv::imshow("People_Counting_Cross_Line", imgFrame2Copy);
     //cv::waitKey(0);                 // uncomment this line to go frame by frame for debugging
     // now we prepare for the next iteration
     currentFrameBlobs.clear();
     imgFrame1 = imgFrame2.clone();           // move frame 1 up to where frame 2 is
     capVideo.read(imgFrame2);
     if ((capVideo.get(CV_CAP_PROP_POS_FRAMES) + 1) < capVideo.get(CV_CAP_PROP_FRAME_COUNT)) {
         capVideo.read(imgFrame2L);
         resize(imgFrame2L, imgFrame2, cv::Size(imgFrame2L.size().width / FRAME_SCALE, imgFrame2L.size().height / FRAME_SCALE));
    }
    }
        else {
         time_t now = time(0);
         char* dt = strtok(ctime(&now), "\n");;
         std::cout << dt << ",EOF" << std::endl;
         return(0); // end?
    }
    }

        blnFirstFrame = false;
     frameCount++;
     chCheckForEscKey = cv::waitKey(1);
}
    }

    if (chCheckForEscKey != 27) {               // if the user did not press esc (i.e. we reached the end of the video)
     cv::waitKey(0);                         // hold the windows open to allow the "end of video" message to show
}
    }
    // note that if the user did press esc, we don't need to hold the windows open, we can simply let the program end which will close the windows
 return(0);

}

How to count the number of people going inside/outside a shop?

Hello. How can I calculate how many people are currently in the shop? For example, 7 people came in and 5 people left, so 2 people remain in the shop.

Could you tell me how it can be implemented?

// main.cpp
// In brief:
// For every two consecutive frames:
// 1) convert the image to grayscale - cvtColor
// 2) apply a (Gaussian) blur to the image - GaussianBlur
// 3) compute the absolute difference between the current frame and the following one - absdiff
// 4) apply a threshold - threshold
// 5) dilate twice and erode once - dilate / erode
// 6) find the contours - findContours
// 7) draw a convex hull around the contours from step 6 - Convex Hull. We get white "blobs" - these are cars and other objects
// 8) using the hand-written function matchCurrentFrameBlobsToExistingBlobs we check whether a blob qualifies as a car - based on certain parameters: the distance from the camera to the road and the approximate ratio of the car's size to the road.
// 9) and finally, using the hand-written function checkIfBlobsCrossedTheLine we check whether a blob has crossed the position of the red line (intHorizontalLinePosition):
// blob.centerPositions.get(prevFrameIndex).y > intHorizontalLinePosition && blob.centerPositions.get(currFrameIndex).y <= intHorizontalLinePosition - traffic moving AWAY FROM US
//
// blob.centerPositions.get(prevFrameIndex).y <= intHorizontalLinePosition && blob.centerPositions.get(currFrameIndex).y > intHorizontalLinePosition - traffic moving TOWARDS US
//
// With different parameter settings in the functions from steps 8 and 9 the count can be made from different camera positions. The horizontal line can even be restricted to a single lane, for example:
// blob.centerPositions.get(prevFrameIndex).y > intHorizontalLinePosition && blob.centerPositions.get(currFrameIndex).y <= intHorizontalLinePosition &&
// blob.centerPositions.get(currFrameIndex).x >= 360 &&
// blob.centerPositions.get(currFrameIndex).x <= 620
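As for the counting itself: the program below only accumulates two directional counters whenever a blob crosses the vertical line, so the number of people currently in the shop is simply the difference between entries and exits. A minimal, illustrative sketch of that bookkeeping (the names countEntered/countExited are hypothetical, and it is assumed that crossings counted as "left" in the program are people entering while "right" crossings are people leaving; swap the two if the camera is mounted the other way round):

#include <algorithm>
#include <iostream>

// People still inside = everyone who entered minus everyone who left,
// clamped at zero in case the tracker missed some crossings.
int currentlyInside(int enteredCount, int exitedCount) {
    return std::max(0, enteredCount - exitedCount);
}

int main() {
    int countEntered = 7;   // e.g. 7 people crossed the line going in
    int countExited = 5;    // e.g. 5 people crossed the line going out
    std::cout << "Currently inside: " << currentlyInside(countEntered, countExited) << std::endl;   // prints 2
    return 0;
}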

#include<opencv2/core/core.hpp>
#include<opencv2/highgui/highgui.hpp>
#include<opencv2/imgproc/imgproc.hpp>

#include<iostream>
#include<conio.h>           // it may be necessary to change or remove this line if not using Windows
#include <fstream>      // file utils
#include <ctime>        // timestamp stuff
#include <cstring>      // strtok()
#include "Blob.h"

#define SHOW_STEPS            // un-comment or comment this line to show steps or not
#define FRAME_SCALE     1   // divide frame dimensions by this number

// global variables ///////////////////////////////////////////////////////////////////////////////
const cv::Scalar SCALAR_BLACK = cv::Scalar(0.0, 0.0, 0.0);
const cv::Scalar SCALAR_WHITE = cv::Scalar(255.0, 255.0, 255.0);
const cv::Scalar SCALAR_YELLOW = cv::Scalar(0.0, 255.0, 255.0);
const cv::Scalar SCALAR_GREEN = cv::Scalar(0.0, 200.0, 0.0);
const cv::Scalar SCALAR_RED = cv::Scalar(0.0, 0.0, 255.0);
const cv::Scalar SCALAR_BLUE = cv::Scalar(255.0, 0.0, 0.0);

// function prototypes ////////////////////////////////////////////////////////////////////////////
void matchCurrentFrameBlobsToExistingBlobs(std::vector<Blob> &existingBlobs, std::vector<Blob> &currentFrameBlobs);
void addBlobToExistingBlobs(Blob &currentFrameBlob, std::vector<Blob> &existingBlobs, int &intIndex);
void addNewBlob(Blob &currentFrameBlob, std::vector<Blob> &existingBlobs);
double distanceBetweenPoints(cv::Point point1, cv::Point point2);
void drawAndShowContours(cv::Size imageSize, std::vector<std::vector<cv::Point> > contours, std::string strImageName);
void drawAndShowContours(cv::Size imageSize, std::vector<Blob> blobs, std::string strImageName);
int checkIfBlobsCrossedTheLine(std::vector<Blob> &blobs, int &intVerticalLinePosition, int &carCountL, int &carCountR, std::ofstream &myfile);
void drawBlobInfoOnImage(std::vector<Blob> &blobs, cv::Mat &imgFrame2Copy);
void drawCarCountOnImage(int &carCountL, int &carCountR, cv::Mat &imgFrame2Copy);

///////////////////////////////////////////////////////////////////////////////////////////////////
int main(void) {

    cv::VideoCapture capVideo;
    std::ofstream myfile; // log file

    cv::Mat imgFrame1;
    cv::Mat imgFrame2;
    cv::Mat imgFrame1L;
    cv::Mat imgFrame2L;

    std::vector<Blob> blobs;

    cv::Point crossingLine[2];

    int carCountL = 0;
    int carCountR = 0;

    capVideo.open("input1_2.MOV");
    //capVideo.open("rtsp://192.168.1.254/sjcam.mov");
    //capVideo.open(1);

    // log file
    myfile.open("/tmp/OpenCV-" + std::string() + "-" + std::to_string(time(0)) + ".txt");
    std::cout << "Logging to: \"/tmp/OpenCV-" << "-" << std::to_string(time(0)) << ".txt\"" << std::endl;

    myfile << "\"Timestamp\",\"Left\",\"Right\"" << std::endl;

    if (!capVideo.isOpened()) {                                                 // if unable to open video file
        std::cout << "error reading video file" << std::endl << std::endl;      // show error message
        _getch();                   // it may be necessary to change or remove this line if not using Windows
        return(0);                                                              // and exit program
    }

    if (capVideo.get(CV_CAP_PROP_FRAME_COUNT) < 2) {
        std::cout << "error: video file must have at least two frames";
        _getch();                   // it may be necessary to change or remove this line if not using Windows
        return(0);
    }

    capVideo.read(imgFrame1L);
    capVideo.read(imgFrame2L);

    resize(imgFrame1L, imgFrame1, cv::Size(imgFrame1L.size().width / FRAME_SCALE, imgFrame1L.size().height / FRAME_SCALE));
    resize(imgFrame2L, imgFrame2, cv::Size(imgFrame2L.size().width / FRAME_SCALE, imgFrame2L.size().height / FRAME_SCALE));


    //int intHorizontalLinePosition = (int)std::round((double)imgFrame1.rows * 0.35);
    int intVerticalLinePosition = (int)std::round((double)imgFrame1.cols * 0.50);

    crossingLine[0].y = 0;
    crossingLine[0].x = intVerticalLinePosition;

    crossingLine[1].y = imgFrame1.rows - 1;
    crossingLine[1].x = intVerticalLinePosition;

    char chCheckForEscKey = 0;

    bool blnFirstFrame = true;

    int frameCount = 2;

    while (capVideo.isOpened() && chCheckForEscKey != 27) {

        std::vector<Blob> currentFrameBlobs;

        cv::Mat imgFrame1Copy = imgFrame1.clone();
        cv::Mat imgFrame2Copy = imgFrame2.clone();

        cv::Mat imgDifference;
        cv::Mat imgThresh;

        cv::cvtColor(imgFrame1Copy, imgFrame1Copy, CV_BGR2GRAY);
        cv::cvtColor(imgFrame2Copy, imgFrame2Copy, CV_BGR2GRAY);

        cv::GaussianBlur(imgFrame1Copy, imgFrame1Copy, cv::Size(5, 5), 0);
        cv::GaussianBlur(imgFrame2Copy, imgFrame2Copy, cv::Size(5, 5), 0);

        cv::absdiff(imgFrame1Copy, imgFrame2Copy, imgDifference);

        cv::threshold(imgDifference, imgThresh, 30, 255.0, CV_THRESH_BINARY);

        //cv::imshow("imgThresh", imgThresh);

        cv::Mat structuringElement3x3 = cv::getStructuringElement(cv::MORPH_RECT, cv::Size(3, 3));
        cv::Mat structuringElement5x5 = cv::getStructuringElement(cv::MORPH_RECT, cv::Size(5, 5));
        cv::Mat structuringElement7x7 = cv::getStructuringElement(cv::MORPH_RECT, cv::Size(7, 7));
        cv::Mat structuringElement15x15 = cv::getStructuringElement(cv::MORPH_RECT, cv::Size(15, 15));

        for (unsigned int i = 0; i < 2; i++) {
            cv::dilate(imgThresh, imgThresh, structuringElement5x5);
            cv::dilate(imgThresh, imgThresh, structuringElement5x5);
            cv::erode(imgThresh, imgThresh, structuringElement5x5);
        }

        cv::Mat imgThreshCopy = imgThresh.clone();

        std::vector<std::vector<cv::Point> > contours;

        cv::findContours(imgThreshCopy, contours, cv::RETR_EXTERNAL, cv::CHAIN_APPROX_SIMPLE);

        drawAndShowContours(imgThresh.size(), contours, "imgContours");

        std::vector<std::vector<cv::Point> > convexHulls(contours.size());

        for (unsigned int i = 0; i < contours.size(); i++) {
            cv::convexHull(contours[i], convexHulls[i]);
        }

        drawAndShowContours(imgThresh.size(), convexHulls, "imgConvexHulls");

        for (auto &convexHull : convexHulls) {
            Blob possibleBlob(convexHull);

            if (possibleBlob.currentBoundingRect.area() > 800 &&
                possibleBlob.dblCurrentAspectRatio > 0.2 &&
                possibleBlob.dblCurrentAspectRatio < 4.0 &&
                possibleBlob.currentBoundingRect.width > 40 &&
                possibleBlob.currentBoundingRect.height > 40 &&
                possibleBlob.dblCurrentDiagonalSize > 90.0 &&
                (cv::contourArea(possibleBlob.currentContour) / (double)possibleBlob.currentBoundingRect.area()) > 0.50) {
                currentFrameBlobs.push_back(possibleBlob);
            }
        }

        drawAndShowContours(imgThresh.size(), currentFrameBlobs, "imgCurrentFrameBlobs");

        if (blnFirstFrame == true) {
            for (auto &currentFrameBlob : currentFrameBlobs) {
                blobs.push_back(currentFrameBlob);
            }
        }
        else {
            matchCurrentFrameBlobsToExistingBlobs(blobs, currentFrameBlobs);
        }

        drawAndShowContours(imgThresh.size(), blobs, "imgBlobs");

        imgFrame2Copy = imgFrame2.clone();          // get another copy of frame 2 since we changed the previous frame 2 copy in the processing above

        drawBlobInfoOnImage(blobs, imgFrame2Copy);

        int blnAtLeastOneBlobCrossedTheLine = checkIfBlobsCrossedTheLine(blobs, intVerticalLinePosition, carCountL, carCountR, myfile);

        if (blnAtLeastOneBlobCrossedTheLine == 1) {
            cv::line(imgFrame2Copy, crossingLine[0], crossingLine[1], SCALAR_GREEN, 2);
        }
        else if (blnAtLeastOneBlobCrossedTheLine == 2) {
            cv::line(imgFrame2Copy, crossingLine[0], crossingLine[1], SCALAR_YELLOW, 2);
        }
        else {
            cv::line(imgFrame2Copy, crossingLine[0], crossingLine[1], SCALAR_BLUE, 2);
        }

        drawCarCountOnImage(carCountL, carCountR, imgFrame2Copy);

        cv::imshow("People_Counting_Cross_Line", imgFrame2Copy);

        //cv::waitKey(0);                 // uncomment this line to go frame by frame for debugging

        // now we prepare for the next iteration

        currentFrameBlobs.clear();

        imgFrame1 = imgFrame2.clone();           // move frame 1 up to where frame 2 is
        capVideo.read(imgFrame2);
        if ((capVideo.get(CV_CAP_PROP_POS_FRAMES) + 1) < capVideo.get(CV_CAP_PROP_FRAME_COUNT)) {
            capVideo.read(imgFrame2L);
            resize(imgFrame2L, imgFrame2, cv::Size(imgFrame2L.size().width / FRAME_SCALE, imgFrame2L.size().height / FRAME_SCALE));
        }
        else {
            time_t now = time(0);
            char* dt = strtok(ctime(&now), "\n");
            std::cout << dt << ",EOF" << std::endl;
            return(0); // end?
        }

        blnFirstFrame = false;
        frameCount++;
        chCheckForEscKey = cv::waitKey(1);
    }

    if (chCheckForEscKey != 27) {               // if the user did not press esc (i.e. we reached the end of the video)
        cv::waitKey(0);                         // hold the windows open to allow the "end of video" message to show
    }
    // note that if the user did press esc, we don't need to hold the windows open, we can simply let the program end which will close the windows

    return(0);
}

///////////////////////////////////////////////////////////////////////////////////////////////////
void matchCurrentFrameBlobsToExistingBlobs(std::vector<Blob> &existingBlobs, std::vector<Blob> &currentFrameBlobs) {

    for (auto &existingBlob : existingBlobs) {

        existingBlob.blnCurrentMatchFoundOrNewBlob = false;

        existingBlob.predictNextPosition();
    }

    for (auto &currentFrameBlob : currentFrameBlobs) {

        int intIndexOfLeastDistance = 0;
        double dblLeastDistance = 100000.;

        for (unsigned int i = 0; i < existingBlobs.size(); i++) {

            if (existingBlobs[i].blnStillBeingTracked == true) {

                double dblDistance = distanceBetweenPoints(currentFrameBlob.centerPositions.back(), existingBlobs[i].predictedNextPosition);

                if (dblDistance < dblLeastDistance) {
                    dblLeastDistance = dblDistance;
                    intIndexOfLeastDistance = i;
                }
            }
        }

        if (dblLeastDistance < currentFrameBlob.dblCurrentDiagonalSize * 0.5) {
            addBlobToExistingBlobs(currentFrameBlob, existingBlobs, intIndexOfLeastDistance);
        }
        else {
            addNewBlob(currentFrameBlob, existingBlobs);
        }

    }

    for (auto &existingBlob : existingBlobs) {

        if (existingBlob.blnCurrentMatchFoundOrNewBlob == false) {
            existingBlob.intNumOfConsecutiveFramesWithoutAMatch++;
        }

        if (existingBlob.intNumOfConsecutiveFramesWithoutAMatch >= 5) {
            existingBlob.blnStillBeingTracked = false;
        }

    }

}

///////////////////////////////////////////////////////////////////////////////////////////////////
void addBlobToExistingBlobs(Blob &currentFrameBlob, std::vector<Blob> &existingBlobs, int &intIndex) {

    existingBlobs[intIndex].currentContour = currentFrameBlob.currentContour;
    existingBlobs[intIndex].currentBoundingRect = currentFrameBlob.currentBoundingRect;

    existingBlobs[intIndex].centerPositions.push_back(currentFrameBlob.centerPositions.back());

    existingBlobs[intIndex].dblCurrentDiagonalSize = currentFrameBlob.dblCurrentDiagonalSize;
    existingBlobs[intIndex].dblCurrentAspectRatio = currentFrameBlob.dblCurrentAspectRatio;

    existingBlobs[intIndex].blnStillBeingTracked = true;
    existingBlobs[intIndex].blnCurrentMatchFoundOrNewBlob = true;
}

///////////////////////////////////////////////////////////////////////////////////////////////////
void addNewBlob(Blob &currentFrameBlob, std::vector<Blob> &existingBlobs) {

    currentFrameBlob.blnCurrentMatchFoundOrNewBlob = true;

    existingBlobs.push_back(currentFrameBlob);
}

///////////////////////////////////////////////////////////////////////////////////////////////////
double distanceBetweenPoints(cv::Point point1, cv::Point point2) {

    int intX = abs(point1.x - point2.x);
    int intY = abs(point1.y - point2.y);

    return(sqrt(pow(intX, 2) + pow(intY, 2)));
}

///////////////////////////////////////////////////////////////////////////////////////////////////
void drawAndShowContours(cv::Size imageSize, std::vector<std::vector<cv::Point> > contours, std::string strImageName) {
    cv::Mat image(imageSize, CV_8UC3, SCALAR_BLACK);

    cv::drawContours(image, contours, -1, SCALAR_WHITE, -1);

    //cv::imshow(strImageName, image);
}

///////////////////////////////////////////////////////////////////////////////////////////////////
void drawAndShowContours(cv::Size imageSize, std::vector<Blob> blobs, std::string strImageName) {

    cv::Mat image(imageSize, CV_8UC3, SCALAR_BLACK);

    std::vector<std::vector<cv::Point> > contours;

    for (auto &blob : blobs) {
        if (blob.blnStillBeingTracked == true) {
            contours.push_back(blob.currentContour);
        }
    }

    cv::drawContours(image, contours, -1, SCALAR_WHITE, -1);

    //cv::imshow(strImageName, image);
}

///////////////////////////////////////////////////////////////////////////////////////////////////
int checkIfBlobsCrossedTheLine(std::vector<Blob> &blobs, int &intVerticalLinePosition, int &carCountL, int &carCountR, std::ofstream &myfile) {
    int blnAtLeastOneBlobCrossedTheLine = 0;

    for (auto blob : blobs) {

        if (blob.blnStillBeingTracked == true && blob.centerPositions.size() >= 2) {
            int prevFrameIndex = (int)blob.centerPositions.size() - 2;
            int currFrameIndex = (int)blob.centerPositions.size() - 1;

            //going left
            if (blob.centerPositions[prevFrameIndex].x > intVerticalLinePosition && blob.centerPositions[currFrameIndex].x <= intVerticalLinePosition) {
                carCountL++;

                time_t now = time(0);
                char* dt = strtok(ctime(&now), "\n");
                std::cout << dt << ",1,0 (Left)" << std::endl;
                myfile << dt << ",1,0" << std::endl;
                blnAtLeastOneBlobCrossedTheLine = 1;
            }

            // going right
            if (blob.centerPositions[prevFrameIndex].x < intVerticalLinePosition && blob.centerPositions[currFrameIndex].x >= intVerticalLinePosition) {
                carCountR++;

                time_t now = time(0);
                char* dt = strtok(ctime(&now), "\n");
                std::cout << dt << ",0,1 (Right)" << std::endl;
                myfile << dt << ",0,1" << std::endl;
                blnAtLeastOneBlobCrossedTheLine = 2;
            }

        }

    }

    return blnAtLeastOneBlobCrossedTheLine;
}

///////////////////////////////////////////////////////////////////////////////////////////////////
void drawBlobInfoOnImage(std::vector<Blob> &blobs, cv::Mat &imgFrame2Copy) {

    for (unsigned int i = 0; i < blobs.size(); i++) {

        if (blobs[i].blnStillBeingTracked == true) {
            cv::rectangle(imgFrame2Copy, blobs[i].currentBoundingRect, SCALAR_RED, 2);

            int intFontFace = CV_FONT_HERSHEY_SIMPLEX;
            double dblFontScale = blobs[i].dblCurrentDiagonalSize / 60.0;
            int intFontThickness = (int)std::round(dblFontScale * 1.0);

            cv::putText(imgFrame2Copy, std::to_string(i), blobs[i].centerPositions.back(), intFontFace, dblFontScale, SCALAR_GREEN, intFontThickness);
        }
    }
}

///////////////////////////////////////////////////////////////////////////////////////////////////
void drawCarCountOnImage(int &carCountL, int &carCountR, cv::Mat &imgFrame2Copy) {

    int intFontFace = CV_FONT_HERSHEY_SIMPLEX;
    double dblFontScale = (imgFrame2Copy.rows * imgFrame2Copy.cols) / 200000.0;
    int intFontThickness = (int)std::round(dblFontScale * 1.5);

    cv::Size textSizeL = cv::getTextSize("Inside: " + std::to_string(carCountL), intFontFace, dblFontScale, intFontThickness, 0);
    cv::Size textSizeR = cv::getTextSize("Outside: " + std::to_string(carCountR), intFontFace, dblFontScale, intFontThickness, 0);

    cv::Point ptTextBottomLeftPositionL, ptTextBottomLeftPositionR;

    ptTextBottomLeftPositionL.x = imgFrame2Copy.cols - 1 - (int)((double)textSizeL.width * 1.25);
    ptTextBottomLeftPositionL.y = (int)((double)textSizeL.height * 1.25);

    ptTextBottomLeftPositionR.x = ptTextBottomLeftPositionL.x - 350;
    ptTextBottomLeftPositionR.y = ptTextBottomLeftPositionL.y + (textSizeL.height) * 1.25;

    cv::putText(imgFrame2Copy, "Inside: " + std::to_string(carCountL), ptTextBottomLeftPositionL, intFontFace, dblFontScale, SCALAR_GREEN, intFontThickness);
    cv::putText(imgFrame2Copy, "Outside: " + std::to_string(carCountR), ptTextBottomLeftPositionR, intFontFace, dblFontScale, SCALAR_YELLOW, intFontThickness);

}

How to count the number of people going inside/outside a shop?

Hello. How can I calculate how many people are currently in the shop? For example, 7 people came in and 5 people left, so 2 people remain in the shop.

Could you tell me how it can be implemented?

#include<opencv2/core/core.hpp>
#include<opencv2/highgui/highgui.hpp>
#include<opencv2/imgproc/imgproc.hpp>

#include<iostream>
#include<conio.h>           // it may be necessary to change or remove this line if not using Windows
#include <fstream>      // file utils
#include <ctime>        // timestamp stuff
#include <cstring>      // strtok()
#include "Blob.h"

#define SHOW_STEPS            // un-comment or comment this line to show steps or not
#define FRAME_SCALE     1   // divide frame dimensions by this number

// global variables ///////////////////////////////////////////////////////////////////////////////
const cv::Scalar SCALAR_BLACK = cv::Scalar(0.0, 0.0, 0.0);
const cv::Scalar SCALAR_WHITE = cv::Scalar(255.0, 255.0, 255.0);
const cv::Scalar SCALAR_YELLOW = cv::Scalar(0.0, 255.0, 255.0);
const cv::Scalar SCALAR_GREEN = cv::Scalar(0.0, 200.0, 0.0);
const cv::Scalar SCALAR_RED = cv::Scalar(0.0, 0.0, 255.0);
const cv::Scalar SCALAR_BLUE = cv::Scalar(255.0, 0.0, 0.0);

// function prototypes ////////////////////////////////////////////////////////////////////////////
void matchCurrentFrameBlobsToExistingBlobs(std::vector<Blob> &existingBlobs, std::vector<Blob> &currentFrameBlobs);
void addBlobToExistingBlobs(Blob &currentFrameBlob, std::vector<Blob> &existingBlobs, int &intIndex);
void addNewBlob(Blob &currentFrameBlob, std::vector<Blob> &existingBlobs);
double distanceBetweenPoints(cv::Point point1, cv::Point point2);
void drawAndShowContours(cv::Size imageSize, std::vector<std::vector<cv::Point> > contours, std::string strImageName);
void drawAndShowContours(cv::Size imageSize, std::vector<Blob> blobs, std::string strImageName);
int checkIfBlobsCrossedTheLine(std::vector<Blob> &blobs, int &intVerticalLinePosition, int &ShopCountL, int &ShopCountR, std::ofstream &myfile);
void drawBlobInfoOnImage(std::vector<Blob> &blobs, cv::Mat &imgFrame2Copy);
void drawShopCountOnImage(int &ShopCountL, int &ShopCountR, cv::Mat &imgFrame2Copy);

///////////////////////////////////////////////////////////////////////////////////////////////////
int main(void) {

    cv::VideoCapture capVideo;
    std::ofstream myfile; // log file

    cv::Mat imgFrame1;
    cv::Mat imgFrame2;
    cv::Mat imgFrame1L;
    cv::Mat imgFrame2L;

    std::vector<Blob> blobs;

    cv::Point crossingLine[2];

    int ShopCountL = 0;
    int ShopCountR = 0;

    capVideo.open("input1_2.MOV");
    //capVideo.open("rtsp://192.168.1.254/sjcam.mov");
    //capVideo.open(1);

    // log file
    myfile.open("/tmp/OpenCV-" + std::string() + "-" + std::to_string(time(0)) + ".txt");
    std::cout << "Logging to: \"/tmp/OpenCV-" << "-" << std::to_string(time(0)) << ".txt\"" << std::endl;

    myfile << "\"Timestamp\",\"Left\",\"Right\"" << std::endl;

    if (!capVideo.isOpened()) {                                                 // if unable to open video file
        std::cout << "error reading video file" << std::endl << std::endl;      // show error message
        _getch();                   // it may be necessary to change or remove this line if not using Windows
        return(0);                                                              // and exit program
    }

    if (capVideo.get(CV_CAP_PROP_FRAME_COUNT) < 2) {
        std::cout << "error: video file must have at least two frames";
        _getch();                   // it may be necessary to change or remove this line if not using Windows
        return(0);
    }

    capVideo.read(imgFrame1L);
    capVideo.read(imgFrame2L);

    resize(imgFrame1L, imgFrame1, cv::Size(imgFrame1L.size().width / FRAME_SCALE, imgFrame1L.size().height / FRAME_SCALE));
    resize(imgFrame2L, imgFrame2, cv::Size(imgFrame2L.size().width / FRAME_SCALE, imgFrame2L.size().height / FRAME_SCALE));


    //int intHorizontalLinePosition = (int)std::round((double)imgFrame1.rows * 0.35);
    int intVerticalLinePosition = (int)std::round((double)imgFrame1.cols * 0.50);

    crossingLine[0].y = 0;
    crossingLine[0].x = intVerticalLinePosition;

    crossingLine[1].y = imgFrame1.rows - 1;
    crossingLine[1].x = intVerticalLinePosition;

    char chCheckForEscKey = 0;

    bool blnFirstFrame = true;

    int frameCount = 2;

    while (capVideo.isOpened() && chCheckForEscKey != 27) {

        std::vector<Blob> currentFrameBlobs;

        cv::Mat imgFrame1Copy = imgFrame1.clone();
        cv::Mat imgFrame2Copy = imgFrame2.clone();

        cv::Mat imgDifference;
        cv::Mat imgThresh;

        cv::cvtColor(imgFrame1Copy, imgFrame1Copy, CV_BGR2GRAY);
        cv::cvtColor(imgFrame2Copy, imgFrame2Copy, CV_BGR2GRAY);

        cv::GaussianBlur(imgFrame1Copy, imgFrame1Copy, cv::Size(5, 5), 0);
        cv::GaussianBlur(imgFrame2Copy, imgFrame2Copy, cv::Size(5, 5), 0);

        cv::absdiff(imgFrame1Copy, imgFrame2Copy, imgDifference);

        cv::threshold(imgDifference, imgThresh, 30, 255.0, CV_THRESH_BINARY);

        //cv::imshow("imgThresh", imgThresh);

        cv::Mat structuringElement3x3 = cv::getStructuringElement(cv::MORPH_RECT, cv::Size(3, 3));
        cv::Mat structuringElement5x5 = cv::getStructuringElement(cv::MORPH_RECT, cv::Size(5, 5));
        cv::Mat structuringElement7x7 = cv::getStructuringElement(cv::MORPH_RECT, cv::Size(7, 7));
        cv::Mat structuringElement15x15 = cv::getStructuringElement(cv::MORPH_RECT, cv::Size(15, 15));

        for (unsigned int i = 0; i < 2; i++) {
            cv::dilate(imgThresh, imgThresh, structuringElement5x5);
            cv::dilate(imgThresh, imgThresh, structuringElement5x5);
            cv::erode(imgThresh, imgThresh, structuringElement5x5);
        }

        cv::Mat imgThreshCopy = imgThresh.clone();

        std::vector<std::vector<cv::Point> > contours;

        cv::findContours(imgThreshCopy, contours, cv::RETR_EXTERNAL, cv::CHAIN_APPROX_SIMPLE);

        drawAndShowContours(imgThresh.size(), contours, "imgContours");

        std::vector<std::vector<cv::Point> > convexHulls(contours.size());

        for (unsigned int i = 0; i < contours.size(); i++) {
            cv::convexHull(contours[i], convexHulls[i]);
        }

        drawAndShowContours(imgThresh.size(), convexHulls, "imgConvexHulls");

        for (auto &convexHull : convexHulls) {
            Blob possibleBlob(convexHull);

            if (possibleBlob.currentBoundingRect.area() > 800 &&
                possibleBlob.dblCurrentAspectRatio > 0.2 &&
                possibleBlob.dblCurrentAspectRatio < 4.0 &&
                possibleBlob.currentBoundingRect.width > 40 &&
                possibleBlob.currentBoundingRect.height > 40 &&
                possibleBlob.dblCurrentDiagonalSize > 90.0 &&
                (cv::contourArea(possibleBlob.currentContour) / (double)possibleBlob.currentBoundingRect.area()) > 0.50) {
                currentFrameBlobs.push_back(possibleBlob);
            }
        }

        drawAndShowContours(imgThresh.size(), currentFrameBlobs, "imgCurrentFrameBlobs");

        if (blnFirstFrame == true) {
            for (auto &currentFrameBlob : currentFrameBlobs) {
                blobs.push_back(currentFrameBlob);
            }
        }
        else {
            matchCurrentFrameBlobsToExistingBlobs(blobs, currentFrameBlobs);
        }

        drawAndShowContours(imgThresh.size(), blobs, "imgBlobs");

        imgFrame2Copy = imgFrame2.clone();          // get another copy of frame 2 since we changed the previous frame 2 copy in the processing above

        drawBlobInfoOnImage(blobs, imgFrame2Copy);

        int blnAtLeastOneBlobCrossedTheLine = checkIfBlobsCrossedTheLine(blobs, intVerticalLinePosition, ShopCountL, ShopCountR, myfile);

        if (blnAtLeastOneBlobCrossedTheLine == 1) {
            cv::line(imgFrame2Copy, crossingLine[0], crossingLine[1], SCALAR_GREEN, 2);
        }
        else if (blnAtLeastOneBlobCrossedTheLine == 2) {
            cv::line(imgFrame2Copy, crossingLine[0], crossingLine[1], SCALAR_YELLOW, 2);
        }
        else {
            cv::line(imgFrame2Copy, crossingLine[0], crossingLine[1], SCALAR_BLUE, 2);
        }

        drawShopCountOnImage(ShopCountL, ShopCountR, imgFrame2Copy);

        cv::imshow("People_Counting_Cross_Line", imgFrame2Copy);

        //cv::waitKey(0);                 // uncomment this line to go frame by frame for debugging

        // now we prepare for the next iteration

        currentFrameBlobs.clear();

        imgFrame1 = imgFrame2.clone();           // move frame 1 up to where frame 2 is
        capVideo.read(imgFrame2);
        if ((capVideo.get(CV_CAP_PROP_POS_FRAMES) + 1) < capVideo.get(CV_CAP_PROP_FRAME_COUNT)) {
            capVideo.read(imgFrame2L);
            resize(imgFrame2L, imgFrame2, cv::Size(imgFrame2L.size().width / FRAME_SCALE, imgFrame2L.size().height / FRAME_SCALE));
        }
        else {
            time_t now = time(0);
            char* dt = strtok(ctime(&now), "\n");
            std::cout << dt << ",EOF" << std::endl;
            return(0); // end?
        }

        blnFirstFrame = false;
        frameCount++;
        chCheckForEscKey = cv::waitKey(1);
    }

    if (chCheckForEscKey != 27) {               // if the user did not press esc (i.e. we reached the end of the video)
        cv::waitKey(0);                         // hold the windows open to allow the "end of video" message to show
    }
    // note that if the user did press esc, we don't need to hold the windows open, we can simply let the program end which will close the windows

    return(0);
}

///////////////////////////////////////////////////////////////////////////////////////////////////
void matchCurrentFrameBlobsToExistingBlobs(std::vector<Blob> &existingBlobs, std::vector<Blob> &currentFrameBlobs) {

    for (auto &existingBlob : existingBlobs) {

        existingBlob.blnCurrentMatchFoundOrNewBlob = false;

        existingBlob.predictNextPosition();
    }

    for (auto &currentFrameBlob : currentFrameBlobs) {

        int intIndexOfLeastDistance = 0;
        double dblLeastDistance = 100000.;

        for (unsigned int i = 0; i < existingBlobs.size(); i++) {

            if (existingBlobs[i].blnStillBeingTracked == true) {

                double dblDistance = distanceBetweenPoints(currentFrameBlob.centerPositions.back(), existingBlobs[i].predictedNextPosition);

                if (dblDistance < dblLeastDistance) {
                    dblLeastDistance = dblDistance;
                    intIndexOfLeastDistance = i;
                }
            }
        }

        if (dblLeastDistance < currentFrameBlob.dblCurrentDiagonalSize * 0.5) {
            addBlobToExistingBlobs(currentFrameBlob, existingBlobs, intIndexOfLeastDistance);
        }
        else {
            addNewBlob(currentFrameBlob, existingBlobs);
        }

    }

    for (auto &existingBlob : existingBlobs) {

        if (existingBlob.blnCurrentMatchFoundOrNewBlob == false) {
            existingBlob.intNumOfConsecutiveFramesWithoutAMatch++;
        }

        if (existingBlob.intNumOfConsecutiveFramesWithoutAMatch >= 5) {
            existingBlob.blnStillBeingTracked = false;
        }

    }

}

///////////////////////////////////////////////////////////////////////////////////////////////////
void addBlobToExistingBlobs(Blob &currentFrameBlob, std::vector<Blob> &existingBlobs, int &intIndex) {

    existingBlobs[intIndex].currentContour = currentFrameBlob.currentContour;
    existingBlobs[intIndex].currentBoundingRect = currentFrameBlob.currentBoundingRect;

    existingBlobs[intIndex].centerPositions.push_back(currentFrameBlob.centerPositions.back());

    existingBlobs[intIndex].dblCurrentDiagonalSize = currentFrameBlob.dblCurrentDiagonalSize;
    existingBlobs[intIndex].dblCurrentAspectRatio = currentFrameBlob.dblCurrentAspectRatio;

    existingBlobs[intIndex].blnStillBeingTracked = true;
    existingBlobs[intIndex].blnCurrentMatchFoundOrNewBlob = true;
}

///////////////////////////////////////////////////////////////////////////////////////////////////
void addNewBlob(Blob &currentFrameBlob, std::vector<Blob> &existingBlobs) {

    currentFrameBlob.blnCurrentMatchFoundOrNewBlob = true;

    existingBlobs.push_back(currentFrameBlob);
}

///////////////////////////////////////////////////////////////////////////////////////////////////
double distanceBetweenPoints(cv::Point point1, cv::Point point2) {

    int intX = abs(point1.x - point2.x);
    int intY = abs(point1.y - point2.y);

    return(sqrt(pow(intX, 2) + pow(intY, 2)));
}

///////////////////////////////////////////////////////////////////////////////////////////////////
void drawAndShowContours(cv::Size imageSize, std::vector<std::vector<cv::Point> > contours, std::string strImageName) {
    cv::Mat image(imageSize, CV_8UC3, SCALAR_BLACK);

    cv::drawContours(image, contours, -1, SCALAR_WHITE, -1);

    //cv::imshow(strImageName, image);
}

///////////////////////////////////////////////////////////////////////////////////////////////////
void drawAndShowContours(cv::Size imageSize, std::vector<Blob> blobs, std::string strImageName) {

    cv::Mat image(imageSize, CV_8UC3, SCALAR_BLACK);

    std::vector<std::vector<cv::Point> > contours;

    for (auto &blob : blobs) {
        if (blob.blnStillBeingTracked == true) {
            contours.push_back(blob.currentContour);
        }
    }

    cv::drawContours(image, contours, -1, SCALAR_WHITE, -1);

    //cv::imshow(strImageName, image);
}

///////////////////////////////////////////////////////////////////////////////////////////////////
int checkIfBlobsCrossedTheLine(std::vector<Blob> &blobs, int &intVerticalLinePosition, int &ShopCountL, int &ShopCountR, std::ofstream &myfile) {
    int blnAtLeastOneBlobCrossedTheLine = 0;

    for (auto blob : blobs) {

        if (blob.blnStillBeingTracked == true && blob.centerPositions.size() >= 2) {
            int prevFrameIndex = (int)blob.centerPositions.size() - 2;
            int currFrameIndex = (int)blob.centerPositions.size() - 1;

            //going left
            if (blob.centerPositions[prevFrameIndex].x > intVerticalLinePosition && blob.centerPositions[currFrameIndex].x <= intVerticalLinePosition) {
                ShopCountL++;

                time_t now = time(0);
                char* dt = strtok(ctime(&now), "\n");
                std::cout << dt << ",1,0 (Left)" << std::endl;
                myfile << dt << ",1,0" << std::endl;
                blnAtLeastOneBlobCrossedTheLine = 1;
            }

            // going right
            if (blob.centerPositions[prevFrameIndex].x < intVerticalLinePosition && blob.centerPositions[currFrameIndex].x >= intVerticalLinePosition) {
                ShopCountR++;

                time_t now = time(0);
                char* dt = strtok(ctime(&now), "\n");
                std::cout << dt << ",0,1 (Right)" << std::endl;
                myfile << dt << ",0,1" << std::endl;
                blnAtLeastOneBlobCrossedTheLine = 2;
            }

        }

    }

    return blnAtLeastOneBlobCrossedTheLine;
}

///////////////////////////////////////////////////////////////////////////////////////////////////
void drawBlobInfoOnImage(std::vector<Blob> &blobs, cv::Mat &imgFrame2Copy) {

    for (unsigned int i = 0; i < blobs.size(); i++) {

        if (blobs[i].blnStillBeingTracked == true) {
            cv::rectangle(imgFrame2Copy, blobs[i].currentBoundingRect, SCALAR_RED, 2);

            int intFontFace = CV_FONT_HERSHEY_SIMPLEX;
            double dblFontScale = blobs[i].dblCurrentDiagonalSize / 60.0;
            int intFontThickness = (int)std::round(dblFontScale * 1.0);

            cv::putText(imgFrame2Copy, std::to_string(i), blobs[i].centerPositions.back(), intFontFace, dblFontScale, SCALAR_GREEN, intFontThickness);
        }
    }
}

///////////////////////////////////////////////////////////////////////////////////////////////////
void drawShopCountOnImage(int &ShopCountL, int &ShopCountR, cv::Mat &imgFrame2Copy) {

    int intFontFace = CV_FONT_HERSHEY_SIMPLEX;
    double dblFontScale = (imgFrame2Copy.rows * imgFrame2Copy.cols) / 200000.0;
    int intFontThickness = (int)std::round(dblFontScale * 1.5);

    cv::Size textSizeL = cv::getTextSize("Inside: " + std::to_string(ShopCountL), intFontFace, dblFontScale, intFontThickness, 0);
    cv::Size textSizeR = cv::getTextSize("Outside: " + std::to_string(ShopCountR), intFontFace, dblFontScale, intFontThickness, 0);

    cv::Point ptTextBottomLeftPositionL, ptTextBottomLeftPositionR;

    ptTextBottomLeftPositionL.x = imgFrame2Copy.cols - 1 - (int)((double)textSizeL.width * 1.25);
    ptTextBottomLeftPositionL.y = (int)((double)textSizeL.height * 1.25);

    ptTextBottomLeftPositionR.x = ptTextBottomLeftPositionL.x - 350;
    ptTextBottomLeftPositionR.y = ptTextBottomLeftPositionL.y + (textSizeL.height) * 1.25;

    cv::putText(imgFrame2Copy, "Inside: " + std::to_string(ShopCountL), ptTextBottomLeftPositionL, intFontFace, dblFontScale, SCALAR_GREEN, intFontThickness);
    cv::putText(imgFrame2Copy, "Outside: " + std::to_string(ShopCountR), ptTextBottomLeftPositionR, intFontFace, dblFontScale, SCALAR_YELLOW, intFontThickness);

}

How to count the number of people going inside/outside a shop?

Hello. How can I calculate how many people are currently in the shop? For example, 7 people came in and 5 people left, so 2 people remain in the shop.

Could you tell me how it can be implemented?

#include<opencv2/core/core.hpp>
#include<opencv2/highgui/highgui.hpp>
#include<opencv2/imgproc/imgproc.hpp>

#include<iostream>
#include<conio.h>           // it may be necessary to change or remove this line if not using Windows
#include <fstream>      // file utils
#include <ctime>        // timestamp stuff
#include <cstring>      // strtok()
#include "Blob.h"

#define SHOW_STEPS            // un-comment or comment this line to show steps or not
#define FRAME_SCALE     1   // divide frame dimensions by this number

// global variables ///////////////////////////////////////////////////////////////////////////////
const cv::Scalar SCALAR_BLACK = cv::Scalar(0.0, 0.0, 0.0);
const cv::Scalar SCALAR_WHITE = cv::Scalar(255.0, 255.0, 255.0);
const cv::Scalar SCALAR_YELLOW = cv::Scalar(0.0, 255.0, 255.0);
const cv::Scalar SCALAR_GREEN = cv::Scalar(0.0, 200.0, 0.0);
const cv::Scalar SCALAR_RED = cv::Scalar(0.0, 0.0, 255.0);
const cv::Scalar SCALAR_BLUE = cv::Scalar(255.0, 0.0, 0.0);

// function prototypes ////////////////////////////////////////////////////////////////////////////
void matchCurrentFrameBlobsToExistingBlobs(std::vector<Blob> &existingBlobs, std::vector<Blob> &currentFrameBlobs);
void addBlobToExistingBlobs(Blob &currentFrameBlob, std::vector<Blob> &existingBlobs, int &intIndex);
void addNewBlob(Blob &currentFrameBlob, std::vector<Blob> &existingBlobs);
double distanceBetweenPoints(cv::Point point1, cv::Point point2);
void drawAndShowContours(cv::Size imageSize, std::vector<std::vector<cv::Point> > contours, std::string strImageName);
void drawAndShowContours(cv::Size imageSize, std::vector<Blob> blobs, std::string strImageName);
int checkIfBlobsCrossedTheLine(std::vector<Blob> &blobs, int &intVerticalLinePosition, int &ShopCountL, int &ShopCountR, std::ofstream &myfile);
void drawBlobInfoOnImage(std::vector<Blob> &blobs, cv::Mat &imgFrame2Copy);
void drawShopCountOnImage(int &ShopCountL, int &ShopCountR, cv::Mat &imgFrame2Copy);

///////////////////////////////////////////////////////////////////////////////////////////////////
int main(void) {

    cv::VideoCapture capVideo;
    std::ofstream myfile; // log file

    cv::Mat imgFrame1;
    cv::Mat imgFrame2;
    cv::Mat imgFrame1L;
    cv::Mat imgFrame2L;

    std::vector<Blob> blobs;

    cv::Point crossingLine[2];

    int ShopCountL = 0;
    int ShopCountR = 0;

    capVideo.open("input1_2.MOV");
    //capVideo.open("rtsp://192.168.1.254/sjcam.mov");
    //capVideo.open(1);

    // log file
    myfile.open("/tmp/OpenCV-" + std::string() + "-" + std::to_string(time(0)) + ".txt");
    std::cout << "Logging to: \"/tmp/OpenCV-" << "-" << std::to_string(time(0)) << ".txt\"" << std::endl;

    myfile << "\"Timestamp\",\"Left\",\"Right\"" << std::endl;

    if (!capVideo.isOpened()) {                                                 // if unable to open video file
        std::cout << "error reading video file" << std::endl << std::endl;      // show error message
        _getch();                   // it may be necessary to change or remove this line if not using Windows
        return(0);                                                              // and exit program
    }

    if (capVideo.get(CV_CAP_PROP_FRAME_COUNT) < 2) {
        std::cout << "error: video file must have at least two frames";
        _getch();                   // it may be necessary to change or remove this line if not using Windows
        return(0);
    }

    capVideo.read(imgFrame1L);
    capVideo.read(imgFrame2L);

    resize(imgFrame1L, imgFrame1, cv::Size(imgFrame1L.size().width / FRAME_SCALE, imgFrame1L.size().height / FRAME_SCALE));
    resize(imgFrame2L, imgFrame2, cv::Size(imgFrame2L.size().width / FRAME_SCALE, imgFrame2L.size().height / FRAME_SCALE));


    //int intHorizontalLinePosition = (int)std::round((double)imgFrame1.rows * 0.35);
    int intVerticalLinePosition = (int)std::round((double)imgFrame1.cols * 0.50);

    crossingLine[0].y = 0;
    crossingLine[0].x = intVerticalLinePosition;

    crossingLine[1].y = imgFrame1.rows - 1;
    crossingLine[1].x = intVerticalLinePosition;

    char chCheckForEscKey = 0;

    bool blnFirstFrame = true;

    int frameCount = 2;

    while (capVideo.isOpened() && chCheckForEscKey != 27) {

        std::vector<Blob> currentFrameBlobs;

        cv::Mat imgFrame1Copy = imgFrame1.clone();
        cv::Mat imgFrame2Copy = imgFrame2.clone();

        cv::Mat imgDifference;
        cv::Mat imgThresh;

        cv::cvtColor(imgFrame1Copy, imgFrame1Copy, CV_BGR2GRAY);
        cv::cvtColor(imgFrame2Copy, imgFrame2Copy, CV_BGR2GRAY);

        cv::GaussianBlur(imgFrame1Copy, imgFrame1Copy, cv::Size(5, 5), 0);
        cv::GaussianBlur(imgFrame2Copy, imgFrame2Copy, cv::Size(5, 5), 0);

        cv::absdiff(imgFrame1Copy, imgFrame2Copy, imgDifference);

        cv::threshold(imgDifference, imgThresh, 30, 255.0, CV_THRESH_BINARY);

        //cv::imshow("imgThresh", imgThresh);

        cv::Mat structuringElement3x3 = cv::getStructuringElement(cv::MORPH_RECT, cv::Size(3, 3));
        cv::Mat structuringElement5x5 = cv::getStructuringElement(cv::MORPH_RECT, cv::Size(5, 5));
        cv::Mat structuringElement7x7 = cv::getStructuringElement(cv::MORPH_RECT, cv::Size(7, 7));
        cv::Mat structuringElement15x15 = cv::getStructuringElement(cv::MORPH_RECT, cv::Size(15, 15));

        for (unsigned int i = 0; i < 2; i++) {
            cv::dilate(imgThresh, imgThresh, structuringElement5x5);
            cv::dilate(imgThresh, imgThresh, structuringElement5x5);
            cv::erode(imgThresh, imgThresh, structuringElement5x5);
        }
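        // the loop above dilates twice and erodes once per pass (with the 5x5 kernel), so the white regions grow on balance; this closes small holes and joins nearby motion fragments into single blobs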

        cv::Mat imgThreshCopy = imgThresh.clone();

        std::vector<std::vector<cv::Point> > contours;

        cv::findContours(imgThreshCopy, contours, cv::RETR_EXTERNAL, cv::CHAIN_APPROX_SIMPLE);

        drawAndShowContours(imgThresh.size(), contours, "imgContours");

        std::vector<std::vector<cv::Point> > convexHulls(contours.size());

        for (unsigned int i = 0; i < contours.size(); i++) {
            cv::convexHull(contours[i], convexHulls[i]);
        }

        drawAndShowContours(imgThresh.size(), convexHulls, "imgConvexHulls");

        for (auto &convexHull : convexHulls) {
            Blob possibleBlob(convexHull);

            if (possibleBlob.currentBoundingRect.area() > 800 &&
                possibleBlob.dblCurrentAspectRatio > 0.2 &&
                possibleBlob.dblCurrentAspectRatio < 4.0 &&
                possibleBlob.currentBoundingRect.width > 40 &&
                possibleBlob.currentBoundingRect.height > 40 &&
                possibleBlob.dblCurrentDiagonalSize > 90.0 &&
                (cv::contourArea(possibleBlob.currentContour) / (double)possibleBlob.currentBoundingRect.area()) > 0.50) {
                currentFrameBlobs.push_back(possibleBlob);
            }
        }
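        // the area / aspect-ratio / size thresholds in the check above are heuristics for this camera setup; only convex hulls that pass them are kept as candidate people blobs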

        drawAndShowContours(imgThresh.size(), currentFrameBlobs, "imgCurrentFrameBlobs");

        if (blnFirstFrame == true) {
            for (auto &currentFrameBlob : currentFrameBlobs) {
                blobs.push_back(currentFrameBlob);
            }
        }
        else {
            matchCurrentFrameBlobsToExistingBlobs(blobs, currentFrameBlobs);
        }

        drawAndShowContours(imgThresh.size(), blobs, "imgBlobs");

        imgFrame2Copy = imgFrame2.clone();          // get another copy of frame 2 since we changed the previous frame 2 copy in the processing above

        drawBlobInfoOnImage(blobs, imgFrame2Copy);

        int blnAtLeastOneBlobCrossedTheLine = checkIfBlobsCrossedTheLine(blobs, intVerticalLinePosition, ShopCountL, ShopCountR, myfile);

        if (blnAtLeastOneBlobCrossedTheLine == 1) {
            cv::line(imgFrame2Copy, crossingLine[0], crossingLine[1], SCALAR_GREEN, 2);
        }
        else if (blnAtLeastOneBlobCrossedTheLine == 2) {
            cv::line(imgFrame2Copy, crossingLine[0], crossingLine[1], SCALAR_YELLOW, 2);
        }
        else {
            cv::line(imgFrame2Copy, crossingLine[0], crossingLine[1], SCALAR_BLUE, 2);
        }

        drawShopCountOnImage(ShopCountL, ShopCountR, imgFrame2Copy);

        cv::imshow("People_Counting_Cross_Line", imgFrame2Copy);

        //cv::waitKey(0);                 // uncomment this line to go frame by frame for debugging

        // now we prepare for the next iteration

        currentFrameBlobs.clear();

        imgFrame1 = imgFrame2.clone();           // move frame 1 up to where frame 2 is
        capVideo.read(imgFrame2);
        if ((capVideo.get(CV_CAP_PROP_POS_FRAMES) + 1) < capVideo.get(CV_CAP_PROP_FRAME_COUNT)) {
            capVideo.read(imgFrame2L);
            resize(imgFrame2L, imgFrame2, cv::Size(imgFrame2L.size().width / FRAME_SCALE, imgFrame2L.size().height / FRAME_SCALE));
        }
        else {
            time_t now = time(0);
            char* dt = strtok(ctime(&now), "\n");
            std::cout << dt << ",EOF" << std::endl;
            return(0); // end?
        }

        blnFirstFrame = false;
        frameCount++;
        chCheckForEscKey = cv::waitKey(1);
    }

    if (chCheckForEscKey != 27) {               // if the user did not press esc (i.e. we reached the end of the video)
        cv::waitKey(0);                         // hold the windows open to allow the "end of video" message to show
    }
    // note that if the user did press esc, we don't need to hold the windows open, we can simply let the program end which will close the windows

    return(0);
}

///////////////////////////////////////////////////////////////////////////////////////////////////
void matchCurrentFrameBlobsToExistingBlobs(std::vector<Blob> &existingBlobs, std::vector<Blob> &currentFrameBlobs) {

    for (auto &existingBlob : existingBlobs) {

        existingBlob.blnCurrentMatchFoundOrNewBlob = false;

        existingBlob.predictNextPosition();
    }

    for (auto &currentFrameBlob : currentFrameBlobs) {

        int intIndexOfLeastDistance = 0;
        double dblLeastDistance = 100000.;

        for (unsigned int i = 0; i < existingBlobs.size(); i++) {

            if (existingBlobs[i].blnStillBeingTracked == true) {

                double dblDistance = distanceBetweenPoints(currentFrameBlob.centerPositions.back(), existingBlobs[i].predictedNextPosition);

                if (dblDistance < dblLeastDistance) {
                    dblLeastDistance = dblDistance;
                    intIndexOfLeastDistance = i;
                }
            }
        }

        if (dblLeastDistance < currentFrameBlob.dblCurrentDiagonalSize * 0.5) {
            addBlobToExistingBlobs(currentFrameBlob, existingBlobs, intIndexOfLeastDistance);
        }
        else {
            addNewBlob(currentFrameBlob, existingBlobs);
        }

    }

    for (auto &existingBlob : existingBlobs) {

        if (existingBlob.blnCurrentMatchFoundOrNewBlob == false) {
            existingBlob.intNumOfConsecutiveFramesWithoutAMatch++;
        }

        if (existingBlob.intNumOfConsecutiveFramesWithoutAMatch >= 5) {
            existingBlob.blnStillBeingTracked = false;
        }

    }

}

///////////////////////////////////////////////////////////////////////////////////////////////////
void addBlobToExistingBlobs(Blob &currentFrameBlob, std::vector<Blob> &existingBlobs, int &intIndex) {

    existingBlobs[intIndex].currentContour = currentFrameBlob.currentContour;
    existingBlobs[intIndex].currentBoundingRect = currentFrameBlob.currentBoundingRect;

    existingBlobs[intIndex].centerPositions.push_back(currentFrameBlob.centerPositions.back());

    existingBlobs[intIndex].dblCurrentDiagonalSize = currentFrameBlob.dblCurrentDiagonalSize;
    existingBlobs[intIndex].dblCurrentAspectRatio = currentFrameBlob.dblCurrentAspectRatio;

    existingBlobs[intIndex].blnStillBeingTracked = true;
    existingBlobs[intIndex].blnCurrentMatchFoundOrNewBlob = true;
}

///////////////////////////////////////////////////////////////////////////////////////////////////
void addNewBlob(Blob &currentFrameBlob, std::vector<Blob> &existingBlobs) {

    currentFrameBlob.blnCurrentMatchFoundOrNewBlob = true;

    existingBlobs.push_back(currentFrameBlob);
}

///////////////////////////////////////////////////////////////////////////////////////////////////
double distanceBetweenPoints(cv::Point point1, cv::Point point2) {

    int intX = abs(point1.x - point2.x);
    int intY = abs(point1.y - point2.y);

    return(sqrt(pow(intX, 2) + pow(intY, 2)));
}

///////////////////////////////////////////////////////////////////////////////////////////////////
void drawAndShowContours(cv::Size imageSize, std::vector<std::vector<cv::Point> > contours, std::string strImageName) {
    cv::Mat image(imageSize, CV_8UC3, SCALAR_BLACK);

    cv::drawContours(image, contours, -1, SCALAR_WHITE, -1);

    //cv::imshow(strImageName, image);
}

///////////////////////////////////////////////////////////////////////////////////////////////////
void drawAndShowContours(cv::Size imageSize, std::vector<Blob> blobs, std::string strImageName) {

    cv::Mat image(imageSize, CV_8UC3, SCALAR_BLACK);

    std::vector<std::vector<cv::Point> > contours;

    for (auto &blob : blobs) {
        if (blob.blnStillBeingTracked == true) {
            contours.push_back(blob.currentContour);
        }
    }

    cv::drawContours(image, contours, -1, SCALAR_WHITE, -1);

    //cv::imshow(strImageName, image);
}

///////////////////////////////////////////////////////////////////////////////////////////////////
int checkIfBlobsCrossedTheLine(std::vector<Blob> &blobs, int &intVerticalLinePosition, int &ShopCountL, int &ShopCountR, std::ofstream &myfile) {
    int blnAtLeastOneBlobCrossedTheLine = 0;
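    // 0 = no blob crossed the line in this frame, 1 = the most recent crossing went left, 2 = it went right; main() uses this value to choose the colour of the crossing line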

    for (auto blob : blobs) {

        if (blob.blnStillBeingTracked == true && blob.centerPositions.size() >= 2) {
            int prevFrameIndex = (int)blob.centerPositions.size() - 2;
            int currFrameIndex = (int)blob.centerPositions.size() - 1;

            //going left
            if (blob.centerPositions[prevFrameIndex].x > intVerticalLinePosition && blob.centerPositions[currFrameIndex].x <= intVerticalLinePosition) {
                ShopCountL++;

                time_t now = time(0);
                char* dt = strtok(ctime(&now), "\n");
                std::cout << dt << ",1,0 (Left)" << std::endl;
                myfile << dt << ",1,0" << std::endl;
                blnAtLeastOneBlobCrossedTheLine = 1;
            }

            // going right
            if (blob.centerPositions[prevFrameIndex].x < intVerticalLinePosition && blob.centerPositions[currFrameIndex].x >= intVerticalLinePosition) {
                ShopCountR++;

                time_t now = time(0);
                char* dt = strtok(ctime(&now), "\n");
                std::cout << dt << ",0,1 (Right)" << std::endl;
                myfile << dt << ",0,1" << std::endl;
                blnAtLeastOneBlobCrossedTheLine = 2;
            }

        }

    }

    return blnAtLeastOneBlobCrossedTheLine;
}

///////////////////////////////////////////////////////////////////////////////////////////////////
void drawBlobInfoOnImage(std::vector<Blob> &blobs, cv::Mat &imgFrame2Copy) {

    for (unsigned int i = 0; i < blobs.size(); i++) {

        if (blobs[i].blnStillBeingTracked == true) {
            cv::rectangle(imgFrame2Copy, blobs[i].currentBoundingRect, SCALAR_RED, 2);

            int intFontFace = CV_FONT_HERSHEY_SIMPLEX;
            double dblFontScale = blobs[i].dblCurrentDiagonalSize / 60.0;
            int intFontThickness = (int)std::round(dblFontScale * 1.0);

            cv::putText(imgFrame2Copy, std::to_string(i), blobs[i].centerPositions.back(), intFontFace, dblFontScale, SCALAR_GREEN, intFontThickness);
        }
    }
}

///////////////////////////////////////////////////////////////////////////////////////////////////
void drawShopCountOnImage(int &ShopCountL, int &ShopCountR, cv::Mat &imgFrame2Copy) {

    int intFontFace = CV_FONT_HERSHEY_SIMPLEX;
    double dblFontScale = (imgFrame2Copy.rows * imgFrame2Copy.cols) / 200000.0;
    int intFontThickness = (int)std::round(dblFontScale * 1.5);

    cv::Size textSizeL = cv::getTextSize("Inside: " + std::to_string(ShopCountL), intFontFace, dblFontScale, intFontThickness, 0);
    cv::Size textSizeR = cv::getTextSize("Outside: " + std::to_string(ShopCountR), intFontFace, dblFontScale, intFontThickness, 0);

    cv::Point ptTextBottomLeftPositionL, ptTextBottomLeftPositionR;

    ptTextBottomLeftPositionL.x = imgFrame2Copy.cols - 1 - (int)((double)textSizeL.width * 1.25);
    ptTextBottomLeftPositionL.y = (int)((double)textSizeL.height * 1.25);

    ptTextBottomLeftPositionR.x = ptTextBottomLeftPositionL.x - 350;
    ptTextBottomLeftPositionR.y = ptTextBottomLeftPositionL.y + (textSizeL.height) * 1.25;

    cv::putText(imgFrame2Copy, "Inside: " + std::to_string(ShopCountL), ptTextBottomLeftPositionL, intFontFace, dblFontScale, SCALAR_GREEN, intFontThickness);
    cv::putText(imgFrame2Copy, "Outside: " + std::to_string(ShopCountR), ptTextBottomLeftPositionR, intFontFace, dblFontScale, SCALAR_YELLOW, intFontThickness);

}