Opencv C ++网络摄像头程序与C服务器之间正确的进程间通信方法

时间:2019-01-03 21:57:10

标签: c opencv server ipc

考虑一个简单的OpenCV程序来捕获摄像头帧:

#ifndef __OPENCV__
#define __OPENCV__
#include "opencv2/opencv.hpp"
#endif

#include <unistd.h>

#include <cmath>
#include <iostream>
#include <vector>

#include <SDL.h>
#include <SDL_events.h>

#include "InputStateContext.hpp"
#include "MiddlewareConnector.hpp"
#include "constants.hpp"
#include "utils.hpp"

using namespace cv;
using namespace std;

// Forward declarations for the UI callbacks defined below main().
void onTrackbar_changed(int, void* data);
//void onThreshold_changed(int, void* data);
// Legacy OpenCV HighGUI mouse callback (kept for reference; SDL events are used instead).
void onMouse(int evt, int x, int y, int flags, void* param);
// SDL keyboard handler: dispatches released keys to the global state context.
int keyboardCallback(SDL_KeyboardEvent ev);
// SDL mouse handler: rescales window coordinates back to original frame coordinates.
int mouseCallback(SDL_MouseButtonEvent ev, float scaleX, float scaleY, Mat frame);

// Global input state machine shared between main() and all callbacks.
InputStateContext context;

/**
 * Entry point: opens a V4L camera, registers the sensor with the middleware,
 * then runs an SDL window loop that classifies configured parking polygons
 * per frame (Laplacian response inside each polygon mask) and renders the
 * annotated frame.
 *
 * Usage: prog <camera-index> [<host> <port>] <sensor-name>
 */
int main(int argc, char* argv[])
{
    printVersion();

    /* Initialise SDL before any window/renderer work. */
    if( SDL_Init( SDL_INIT_VIDEO ) < 0)
    {
        fprintf( stderr, "Could not initialise SDL: %s\n", SDL_GetError() );
        exit( -1 );
    }

    // Bug fix: argv[1] was read unconditionally; without this guard the
    // program dereferenced a null pointer when started with no arguments.
    if (argc < 2)
    {
        cout<<"stopping execution: too few arguments."<<endl;
        return -1;
    }

    string host;
    unsigned int port;
    const String sourceReference = argv[1]; // camera index as text
    int camNum;
    string sensorName;
    try
    {
        camNum = stoi(sourceReference); // throws std::invalid_argument / std::out_of_range
    }
    catch (const std::exception& e)
    {
        std::cout<<"Exception: " << e.what()<<endl;
        return -1;
    }

    // Argument layout: <camera> <host> <port> <sensorName>   (argc > 4)
    //             or : <camera> <sensorName>                 (argc > 2, default host/port)
    if (argc>4)
    {
        host = argv[2];
        port = atoi(argv[3]); // NOTE: atoi never throws; yields 0 on non-numeric input
        sensorName = argv[4];
    }
    else if(argc>2)
    {
        cout<<"arguments less than 4"<<endl; // typo fixed ("argumetns")
        host = "http://localhost";
        port = 3000;
        sensorName = argv[2];
        cout<<argc<<endl;
        cout<<"sensor name set from arguments: "<< sensorName<<endl;
    }
    else
    {
        cout<<"stopping execution: too few arguments."<<endl;
        return -1;
    }

    // Register this sensor with the middleware before opening the camera.
    MiddlewareConnector middleware(host, port, sensorName, &context);
    context.Attach(&middleware);
    context.Notify(); //register on middleware

    cout<<"camera initializing\n";
    VideoSettings cam(camNum + CAP_V4L); // force the V4L capture backend
    cout<<"camera initialized\n";
    cout<<"Ch3ck c4m3ra is 0p3n3d\n";
    if ( !cam.isOpened())
    {
        cout << "Could not open reference " << sourceReference << endl;
        return -1;
    }

    cout<<"===================================="<<endl<<endl;
    cout<<"Default Brightness %-------> "<<cam.getBrightness()<<endl;
    cout<<"Default Contrast %---------> "<<cam.getContrast()<<endl;
    cout<<"Default Saturation %-------> "<<cam.getSaturation()<<endl;
    cout<<"Default Gain %-------------> "<<cam.getGain()<<endl;
    cout<<"Default hue %--------------> "<<cam.getHue()<<endl<<endl;
    cout<<"====================================\n\n"<<endl;

    // Grab one frame up front so the window can be sized to the stream.
    Mat frame;
    cam>>frame;
    SDL_Window* win = SDL_CreateWindow("Camera", 100, 100, frame.cols, frame.rows,
        SDL_WINDOW_SHOWN | SDL_WINDOW_RESIZABLE);
    int oldWidth = frame.cols, oldHeight= frame.rows;
    int width, height;
    float scaleX=1, scaleY=1; // window-to-frame scale, updated on resize
    SDL_SetWindowTitle(win, "Camera");
    SDL_Renderer * renderer = SDL_CreateRenderer(win, -1, 0);

    SDL_Event genericEvent;
    // Bug fix: initialise to null so cleanup is safe even before the first frame.
    SDL_Surface* frameSurface = nullptr;
    SDL_Texture* frameTexture = nullptr;
    cv::Size blur_kernel = cv::Size(5, 5);

    while(cam.isOpened())
    {
        while( SDL_PollEvent(&genericEvent) )
        {
            switch( genericEvent.type )
            {
                case SDL_KEYDOWN:
                    break;
                case SDL_KEYUP:
                    keyboardCallback(genericEvent.key);
                    break;
                case SDL_MOUSEBUTTONDOWN:
                    mouseCallback(genericEvent.button, scaleX, scaleY, frame);
                    break;
                case SDL_WINDOWEVENT:
                    if (genericEvent.window.event==SDL_WINDOWEVENT_RESIZED)
                    {
                        // Recompute mouse-coordinate scaling relative to the original size.
                        SDL_GetWindowSize(win, &width, &height);
                        scaleX = (float)(width) / (float)(oldWidth);
                        scaleY = (float)(height) / (float)(oldHeight);
                    }
                    break;
                case SDL_QUIT:
                    // Bug fix: release SDL resources instead of returning directly.
                    SDL_DestroyRenderer(renderer);
                    SDL_DestroyWindow(win);
                    SDL_Quit();
                    return 0;
                default:
                    break;
            }
        }

        // Work on a grayscale, blurred copy; keep frame_out for drawing overlays.
        Mat blurred_frame, frame_out;
        frame_out = frame.clone();
        cv::cvtColor(frame, blurred_frame, cv::COLOR_BGR2GRAY);
        cv::GaussianBlur(blurred_frame, blurred_frame, blur_kernel, 3, 3);

        // Classify each configured parking polygon by the mean absolute
        // Laplacian response inside its bounding rect, masked to the polygon.
        Mat roi, laplacian;
        Scalar delta;
        for (int ii=0; ii< context.getPolysNumber(); ii++)
        {
            roi = blurred_frame(context.getParks().at(ii).getBoundingRect());
            cv::Laplacian(roi, laplacian, CV_64F);
            delta = cv::mean(cv::abs(laplacian), context.getParks().at(ii).getMask());
            // Bug fix: use the floating-point abs explicitly; plain abs() could
            // resolve to the integer overload and truncate sub-unit differences to 0.
            context.setParkingStatus(ii, std::fabs(delta[0] - context.getParks().at(ii).getThreshold())<0.35 );
        }

        cam>>frame;
        context.draw(frame_out);

        // Wrap the BGR pixel buffer in an SDL surface (no pixel copy is made,
        // so the surface must not outlive frame_out).
        frameSurface = SDL_CreateRGBSurfaceFrom((void*)frame_out.data,
            frame_out.size().width, frame_out.size().height,
            24, frame_out.cols *3,
            0xff0000, 0x00ff00, 0x0000ff, 0);
        if(frameSurface == NULL)
        {
            SDL_Log("Couldn't convert Mat to Surface.");
            return -2;
        }

        frameTexture = SDL_CreateTextureFromSurface(renderer, frameSurface);
        if(frameTexture == NULL)
        {
            SDL_Log("Couldn't convert Mat(converted to surface) to Texture.");
            return -1;
        }

        SDL_RenderCopy(renderer, frameTexture, NULL, NULL);
        SDL_RenderPresent(renderer);

        // Bug fix: the surface/texture were recreated on every iteration but
        // released only once after the loop — a per-frame memory leak.
        SDL_DestroyTexture(frameTexture);
        frameTexture = nullptr;
        SDL_FreeSurface(frameSurface);
        frameSurface = nullptr;

        /* A delay is needed to show (it actually waits for an input). */
        if(waitKey(delay)>delay){;}
    }

    SDL_DestroyRenderer(renderer);
    SDL_DestroyWindow(win);
    SDL_Quit();
    return 0;
}

void onTrackbar_changed(int, void* data)
{
 VideoSettings cam = *((VideoSettings*)data);
 cam.update();
}

void onMouse(int evt, int x, int y, int flags, void* param)
{

    if(evt == EVENT_LBUTTONDOWN)
    {
      context.mouseLeft( x, y);

    }
    else if(evt == EVENT_RBUTTONDOWN)
    {
        try
        {
            Mat* fr = (Mat* ) param;
            context.setFrame(*fr);
        }
        catch (const std::exception& e)
        {
            cout<<"onMouse frame not converted"<<endl;
        }
        context.mouseRight();
    }
}

// Dispatches a released SDL key to the matching action on the global state
// context. Arrow keys adjust the current value (up/right = +1, down/left = -1);
// unrecognised keys are ignored. Always returns 0.
int keyboardCallback(SDL_KeyboardEvent ev)
{
    const SDL_Keycode key = ev.keysym.sym;

    if (key == SDLK_a)
    {
        cout<<"calling context keyboardA"<<endl;
        context.keyboardA();
    }
    else if (key == SDLK_e)
    {
        cout<<"calling context keyboardE"<<endl;
        context.keyboardE();
    }
    else if (key == SDLK_m)
    {
        cout<<"calling context keyboardM"<<endl;
        context.keyboardM();
    }
    else if (key == SDLK_UP || key == SDLK_RIGHT)
    {
        cout<<"calling context RIGHT ARROW"<<endl;
        context.keyboardArrows(1);
    }
    else if (key == SDLK_DOWN || key == SDLK_LEFT)
    {
        cout<<"calling context LEFT ARROW"<<endl;
        context.keyboardArrows(-1);
    }
    else if (key == SDLK_RETURN)
    {
        cout<<"calling context RETURN ARROW"<<endl;
        context.keyboardReturn();
    }

    return 0;
}


/**
 * SDL mouse-button handler.
 *
 * Left button: converts the window-space click back into original-frame
 * coordinates (the window may have been resized) and forwards it to the
 * state machine. Right button: stores the current frame in the context,
 * then signals the right-button action.
 *
 * Bug fix: the function is declared to return int but the original fell off
 * the end without a return statement — undefined behaviour in C++. It now
 * returns 0 on every path.
 */
int mouseCallback(SDL_MouseButtonEvent ev, float scaleX, float scaleY, Mat frame)
{
    if(ev.button == SDL_BUTTON_LEFT)
    {
        cout<<scaleX<<" "<<scaleY<<endl;
        // Undo the window resize scaling to recover frame-space coordinates.
        int scaled_x = static_cast<int>((float)(ev.x) / scaleX);
        int scaled_y = static_cast<int>((float)(ev.y) / scaleY);
        std::cout<<"scaled x: "<<scaled_x<<", scaled y: "<<scaled_y<<endl;
        context.mouseLeft( scaled_x,scaled_y);
    }
    else if(ev.button == SDL_BUTTON_RIGHT)
    {
        try
        {
            context.setFrame(frame);
        }
        catch (const std::exception& e)
        {
            cout<<"onMouse frame not converted"<<endl;
        }
        context.mouseRight();
    }
    return 0;
}

我有一个单独的服务器/客户端应用程序（使用 C 语言编写，当前使用 TCP，但我会将其改为 UDP），其中服务器与上面这个网络摄像头程序运行在同一台主机上。我希望在客户端连接时，服务器获取摄像头帧（例如每 n 秒一次），然后通过 websocket 将其发送给客户端。

为了简便起见，我考虑让这两个部分彼此独立，因为其中任何一个都可能在没有另一个的情况下单独运行。

但是,我并不确定如何使用标准POSIX方法使网络摄像头程序和服务器程序进行通信(可能是双向通信)。

起初，我想从 OpenCV 程序调用 fork()，然后通过 exec() 启动服务器程序。但是，服务器程序被编写成守护进程，因此我无法使用典型的父子进程通信方式（管道）。

我也可以反过来：从服务器程序调用 fork()，再通过 exec() 运行 OpenCV 程序，使它们具有父子关系，从而可以使用管道（pipe）。但我认为这种做法并不正确。

其他解决方案可以是:

  • FIFO(命名管道)
  • 消息队列
  • 共享内存(这种方式是否可能会导致内存问题?)

1 个答案:

答案 0 :(得分:1)

根据我在评论中所说的内容,建议您考虑使用 Redis ,它是一种非常快速的内存中数据结构服务器。它可以提供字符串,哈希,列表,集合,队列等。它安装简单,占地面积小。可以从C / C ++,bash,Perl,Python,PHP,Java,Ruby和其他语言进行访问。

此外,它是联网的,因此您可以将数据从一个或多个主机转储到其中,并从任何其他主机以任何其他语言收集数据。

因此，这是一个示例生成器，可生成 1000 张图像并将它们尽可能快地填充到 Redis 中，帧的键名依次为 f-0、f-1、f-2……。每个帧都设置了"生存时间"（TTL），这样它会在 10 秒后自动删除，不会在内存中停留太长时间。

#!/bin/bash
################################################################################
# generate - generate "video" frames and dump into Redis
################################################################################
# Redis host
host=127.0.0.1
# Clear out Redis of data from any previous run
redis-cli -h $host flushall > /dev/null
# Generate 1000 frames, with Redis key "f-0", "f-1", "f-2" also setting expiration
ttl=10
for ((i=0;i<1000;i++)) ; do
   frameName=f-$i
   echo Generating frame: $frameName
   convert -size 640x480 -background magenta -fill white -gravity center label:"Frame: $i" JPG: | redis-cli -h $host -x setex "$frameName" $ttl > /dev/null
   redis-cli -h $host set currFrame $i > /dev/null
done

中间的 convert 命令来自 ImageMagick，它只是生成一个"视频"帧，如下所示:

enter image description here


然后有一个 monitor 脚本，用于检查 Redis 正在使用的内存，如下所示:

#!/bin/bash
################################################################################
# monitor
################################################################################
host=127.0.0.1
while :; do
   redis-cli -h $host -r 100 -i 1 info | grep used_memory:
done

然后是一个 latest 脚本，它从 Redis 获取最新帧并显示:

以下是视频:

enter image description here