Do I have to lock a vector that I only enumerate, or only when I change it?

Asked: 2019-03-23 11:23:01

Tags: c++ vector mutex

I both modify and read the elements of a vector. While the worker thread is running, I use a mutex whenever I change the vector's elements.

But if I only want to enumerate the vector's elements, do I still have to lock the vector?

Here is the code:

#include <napi.h>

#include "facedetect.h"

#include "opencv2/objdetect.hpp"
#include "opencv2/highgui.hpp"
#include "opencv2/imgproc.hpp"
#include <iostream>
#include <thread>
#include <mutex>   // std::mutex used below

using namespace std;
using namespace cv;

std::mutex facesMutex;

string cascadeName = "/usr/local/share/opencv4/haarcascades/haarcascade_frontalface_alt.xml";

bool running = true;
vector<Rect> faces;


class FaceDetectWorker : public Napi::AsyncWorker {
public:
    FaceDetectWorker(Napi::Function& callback, string url)
            : Napi::AsyncWorker(callback), url(url) {
    }

    ~FaceDetectWorker() {

    }

    vector<Rect> detectAndDraw( Mat& img, CascadeClassifier& cascade)
    {
        double t = 0;
        vector<Rect> faces;
        Mat gray, smallImg;

        cvtColor( img, gray, COLOR_BGR2GRAY );

        //double scale = 1;
        // scale = 1, fx =  1 / scale
        double fx = 1;
        resize( gray, smallImg, Size(), fx, fx, INTER_LINEAR_EXACT );
        equalizeHist( smallImg, smallImg );

        t = (double)getTickCount();
        cascade.detectMultiScale( smallImg, faces,
                                  1.1, 2, 0
                                          //|CASCADE_FIND_BIGGEST_OBJECT
                                          //|CASCADE_DO_ROUGH_SEARCH
                                          |CASCADE_SCALE_IMAGE,
                                  Size(30, 30) );

        t = (double)getTickCount() - t;
        printf( "detection time = %g ms\n", t*1000/getTickFrequency());
        return faces;
    }

    // Executed inside the worker-thread.
    // It is not safe to access JS engine data structure
    // here, so everything we need for input and output
    // should go on `this`.
    void Execute () {
        Mat frame, image;

        VideoCapture capture;

        CascadeClassifier cascade;

        if (!cascade.load(samples::findFile(cascadeName)))
        {
            Napi::AsyncWorker::SetError("ERROR: Could not load classifier cascade");
            return;
        }

        if (!capture.open(url))
        {
            Napi::AsyncWorker::SetError("ERROR: Error opening video stream " + url);
            return;
        }

        if( capture.isOpened() )
        {
            cout << "Video capturing has been started ..." << endl;

            try {
                while(running) {
                    capture >> frame;

                    if( frame.empty()) {
                        continue;
                    }

                    Mat frame1 = frame.clone();
                    vector<Rect> facesResult = detectAndDraw( frame1, cascade);

                    facesMutex.lock();
                    faces = facesResult;
                    facesMutex.unlock();

                    std::this_thread::sleep_for(std::chrono::milliseconds(30));

                }
            } catch (std::exception &e) {
                facesMutex.unlock();
                Napi::AsyncWorker::SetError(e.what());
            }
        }

    }

    // Executed when the async work is complete
    // this function will be run inside the main event loop
    // so it is safe to use JS engine data again
    void OnOK() {
        Napi::HandleScope scope(Env());
        Callback().Call({Env().Undefined(), Env().Undefined()});
    }

private:
    string url;
};


// Asynchronous access to the `Estimate()` function
Napi::Value FaceDetect(const Napi::CallbackInfo& info) {

    Napi::String url = info[0].As<Napi::String>().ToString();
    Napi::Function callback = info[1].As<Napi::Function>();

    FaceDetectWorker* faceDetectWorker = new FaceDetectWorker(callback, url);
    faceDetectWorker->Queue();
    return info.Env().Undefined();
}


Napi::Array FaceDetectGet(const Napi::CallbackInfo &info) {

    Napi::Array faceArray = Napi::Array::New(info.Env(), faces.size());

    facesMutex.lock();
    for(int i = 0; i < faces.size(); i++) {
        Rect rect = faces[i];
        Napi::Object obj = Napi::Object::New(info.Env());

        obj.Set("x", rect.x);
        obj.Set("y", rect.y);
        obj.Set("width", rect.width);
        obj.Set("height", rect.height);

        faceArray[i] = obj;
    }
    facesMutex.unlock();

    return faceArray;
}

So the question is: since FaceDetectGet only enumerates the vector, do I still need to lock and unlock it there?
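Stripped of the N-API plumbing, the two access paths reduce to something like the sketch below (the names are illustrative, not from the addon): a worker thread replaces the shared vector under a std::mutex, and another thread only iterates it. The question is whether the iterating side needs to take the same lock.

#include <mutex>
#include <vector>

std::mutex vecMutex;
std::vector<int> sharedData;

// Worker thread: replaces the vector's contents under the mutex.
void produce(const std::vector<int>& next) {
    std::lock_guard<std::mutex> lock(vecMutex);
    sharedData = next;
}

// Other thread: only enumerates. Is this lock actually required?
int consume() {
    std::lock_guard<std::mutex> lock(vecMutex);   // <-- the lock in question
    int sum = 0;
    for (int v : sharedData) sum += v;
    return sum;
}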

1 Answer:

Answer 0 (score: 0):

Actually, the solution is to use a shared mutex (std::shared_mutex): the writer takes an exclusive lock while replacing the vector, and readers take a shared lock while enumerating it.
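A minimal sketch of the idiom, assuming C++17 for std::shared_mutex (the names are illustrative, not from the addon): the writing thread holds a std::unique_lock while it replaces the vector, every reading thread holds a std::shared_lock while it enumerates, so concurrent reads never block each other but always exclude the writer.

#include <iostream>
#include <mutex>
#include <shared_mutex>
#include <thread>
#include <vector>

std::shared_mutex dataMutex;
std::vector<int> data;

// Writer: exclusive lock while the vector is replaced.
void writer() {
    for (int i = 0; i < 100; ++i) {
        std::vector<int> next(10, i);
        std::unique_lock<std::shared_mutex> lock(dataMutex);
        data = std::move(next);
    }
}

// Reader: shared lock while enumerating; many readers may hold it
// at once, but a writer waits for all of them to release it.
long reader() {
    long sum = 0;
    for (int i = 0; i < 100; ++i) {
        std::shared_lock<std::shared_mutex> lock(dataMutex);
        for (int v : data) sum += v;
    }
    return sum;
}

int main() {
    std::thread w(writer);
    std::thread r([] { std::cout << reader() << std::endl; });
    w.join();
    r.join();
}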

The full code is as follows:

#include <napi.h>

#include "facedetect.h"

#include "opencv2/objdetect.hpp"
#include "opencv2/highgui.hpp"
#include "opencv2/imgproc.hpp"
#include <iostream>
#include <thread>

#include <mutex>  // For std::unique_lock
#include <shared_mutex>


// https://stackoverflow.com/questions/55313194/do-i-have-to-lock-a-vectors-that-i-just-enumerate-or-i-only-do-it-when-i-change?noredirect=1#comment97357425_55313194

using namespace std;
using namespace cv;

std::shared_mutex _facesMutex;

string cascadeName = "/usr/local/share/opencv4/haarcascades/haarcascade_frontalface_alt.xml";

bool running = true;
vector<Rect> faces;


class FaceDetectWorker : public Napi::AsyncWorker {
public:
    FaceDetectWorker(Napi::Function& callback, string url, int skip, int sleep)
            : Napi::AsyncWorker(callback), url(url), skip(skip), sleep(sleep) {
    }

    ~FaceDetectWorker() {

    }

    vector<Rect> detectFaces(Mat &img, CascadeClassifier &cascade)
    {
        double t = 0;
        vector<Rect> faces;
        Mat gray, smallImg;

        cvtColor( img, gray, COLOR_BGR2GRAY );

        //double scale = 1;
        // scale = 1, fx =  1 / scale
        double fx = 1;
        //resize( gray, smallImg, Size(), fx, fx, INTER_LINEAR_EXACT );
        //equalizeHist( gray, smallImg );

        //t = (double)getTickCount();
        cascade.detectMultiScale( gray, faces,
                                  1.1, 2, 0
                                          //|CASCADE_FIND_BIGGEST_OBJECT
                                          //|CASCADE_DO_ROUGH_SEARCH
                                          |CASCADE_SCALE_IMAGE,
                                  Size(30, 30) );

        //t = (double)getTickCount() - t;
        //printf( "detection time = %g ms\n", t*1000/getTickFrequency());
        return faces;
    }

    // Executed inside the worker-thread.
    // It is not safe to access JS engine data structure
    // here, so everything we need for input and output
    // should go on `this`.
    void Execute () {
        running = true;
        Mat frame, image;

        VideoCapture capture;

        CascadeClassifier cascade;

        if (!cascade.load(samples::findFile(cascadeName)))
        {
            Napi::AsyncWorker::SetError("ERROR: Could not load classifier cascade");
            return;
        }

        if (!capture.open(url))
        {
            Napi::AsyncWorker::SetError("ERROR: Error opening video stream " + url);
            return;
        }

        if( capture.isOpened() )
        {
            cout << "Video capturing has been started ..." << endl;

            try {
                int skipCount = 0;

                while(running) {


                    //capture.read(frame);
                    capture >> frame;

                    if( frame.empty()) {
                        continue;
                    }

                    skipCount++;

                    //cout<< "sleep " << sleep << " skip " << skip << endl;
                    if (skipCount >= skip) {
                        //cout<< "calculation " << skipCount << endl;
                        skipCount = 0;
                        Mat frame1 = frame.clone();
                        vector<Rect> facesResult = detectFaces(frame1, cascade);

                        std::unique_lock lock(_facesMutex);
                        faces = facesResult;
                        lock.unlock();
                    }

                    //waitKey(250);
                    std::this_thread::sleep_for(std::chrono::milliseconds(sleep));

                }
            } catch (std::exception &e) {
                Napi::AsyncWorker::SetError(e.what());
            }
        } else {
            Napi::AsyncWorker::SetError("ERROR: Could not open video camera " + url);
        }

    }

    // Executed when the async work is complete
    // this function will be run inside the main event loop
    // so it is safe to use JS engine data again
    void OnOK() {
        Napi::HandleScope scope(Env());
        Callback().Call({Env().Undefined(), Env().Undefined()});
    }

private:
    string url;
    int skip = 3;
    int sleep = 30;
};


// Asynchronous access to the `Estimate()` function
Napi::Value FaceDetect(const Napi::CallbackInfo& info) {

    Napi::String url = info[0].As<Napi::String>();
    Napi::Number skip = info[1].As<Napi::Number>();
    Napi::Number sleep = info[2].As<Napi::Number>();
    Napi::Function callback = info[3].As<Napi::Function>();

    FaceDetectWorker* faceDetectWorker = new FaceDetectWorker(callback, url, skip, sleep);
    faceDetectWorker->Queue();
    return info.Env().Undefined();
}


Napi::Array FaceDetectGet(const Napi::CallbackInfo &info) {

    // Copy the shared vector under a shared (reader) lock, then build
    // the JS array from the local copy so the lock is held only briefly.
    std::shared_lock lock(_facesMutex);
    vector<Rect> faces2 = faces;
    lock.unlock();

    Napi::Array faceArray = Napi::Array::New(info.Env(), faces2.size());

    for (uint32_t i = 0; i < faces2.size(); i++) {
        Rect rect = faces2[i];
        Napi::Object obj = Napi::Object::New(info.Env());

        obj.Set("x", rect.x);
        obj.Set("y", rect.y);
        obj.Set("width", rect.width);
        obj.Set("height", rect.height);

        faceArray[i] = obj;
    }

    return faceArray;
}

void FaceDetectCancel(const Napi::CallbackInfo &info) {
    running = false;
}
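As a side note (a possible refinement, not part of the code above): running is itself shared between the thread that calls FaceDetectCancel and the worker loop, so declaring it std::atomic<bool> would remove the remaining data race on the stop flag. A minimal sketch:

#include <atomic>
#include <chrono>
#include <thread>

std::atomic<bool> running{true};   // shared stop flag; no extra mutex needed

void workerLoop() {
    while (running.load()) {                 // polled by the worker thread
        std::this_thread::sleep_for(std::chrono::milliseconds(30));
    }
}

int main() {
    std::thread worker(workerLoop);
    running.store(false);                    // e.g. from FaceDetectCancel
    worker.join();
}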