OpenCV Mat to OpenGL with OSVR

I have a problem with OpenCV and OpenGL. I need to display, with OpenGL, the images retrieved from a webcam via OpenCV, and show them in a Razer OSVR headset. With my current code, however, the frame rate is only about 1 or 2 fps, and I don't know what I'm doing wrong. Here is my code; I think the problem is in the draw_cube() function.

Main.cpp

// Internal Includes

#include <osvr/ClientKit/ClientKit.h>
#include <osvr/ClientKit/Display.h>
#include "SDL2Helpers.h"

#include "OpenGLCube.h"

#include <opencv2/core/core.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/opencv.hpp>


// Library/third-party includes
#include <SDL.h>
#include <SDL_opengl.h>

// Standard includes
#include <iostream>

static auto const WIDTH = 1920;
static auto const HEIGHT = 1080;

// Forward declarations of rendering functions defined below.
void render(osvr::clientkit::DisplayConfig &disp);

int main(int argc, char *argv[]) {
    namespace SDL = osvr::SDL2;

    // Open SDL
    SDL::Lib lib;

    // Use OpenGL 2.1
    SDL_GL_SetAttribute(SDL_GL_CONTEXT_MAJOR_VERSION, 2);
    SDL_GL_SetAttribute(SDL_GL_CONTEXT_MINOR_VERSION, 1);

    // Create a window
    auto window = SDL::createWindow("OSVR", SDL_WINDOWPOS_UNDEFINED,
                                SDL_WINDOWPOS_UNDEFINED, WIDTH, HEIGHT,
                                SDL_WINDOW_OPENGL | SDL_WINDOW_SHOWN);
    if (!window) {
        std::cerr << "Could not create window: " << SDL_GetError() << std::endl;
        return -1;
    }

    // Create an OpenGL context and make it current.
    SDL::GLContext glctx(window.get());

    // Turn on V-SYNC
    SDL_GL_SetSwapInterval(1);

    // Start OSVR and get OSVR display config
    osvr::clientkit::ClientContext ctx("com.osvr.example.SDLOpenGL");
    osvr::clientkit::DisplayConfig display(ctx);
    if (!display.valid()) {
        std::cerr << "\nCould not get display config (server probably not "
                 "running or not behaving), exiting."
              << std::endl;
        return -1;
    }

    std::cout << "Waiting for the display to fully start up, including "
             "receiving initial pose update..."
          << std::endl;
    while (!display.checkStartup()) {
        ctx.update();
    }
    std::cout << "OK, display startup status is good!" << std::endl;

    // Event handler
    SDL_Event e;
#ifndef __ANDROID__ // Don't want to pop up the on-screen keyboard
    SDL::TextInput textinput;
#endif
    bool quit = false;
    while (!quit) {
        // Handle all queued events
        while (SDL_PollEvent(&e)) {
            switch (e.type) {
            case SDL_QUIT:
                // Handle some system-wide quit event
                quit = true;
                break;
            case SDL_KEYDOWN:
                if (SDL_SCANCODE_ESCAPE == e.key.keysym.scancode) {
                    // Handle pressing ESC
                    quit = true;
                }
                break;
            }
            if (e.type == SDL_QUIT) {
                quit = true;
            }
        }

        // Update OSVR
        ctx.update();

        // Render
        render(display);

        // Swap buffers
        SDL_GL_SwapWindow(window.get());
    }

    return 0;
}


void render(osvr::clientkit::DisplayConfig &disp) {

    /// For each viewer, eye combination...
    disp.forEachEye([](osvr::clientkit::Eye eye) {

        /// For each display surface seen by the given eye of the given
        /// viewer...
        eye.forEachSurface([](osvr::clientkit::Surface surface) {
            auto viewport = surface.getRelativeViewport();
            glViewport(static_cast<GLint>(viewport.left),
                       static_cast<GLint>(viewport.bottom),
                       static_cast<GLsizei>(viewport.width),
                       static_cast<GLsizei>(viewport.height));

            glLoadIdentity();

            cv::VideoCapture cap(0); // open the default camera

            cv::Mat img;
            cap >> img; // get a new frame from camera

            cv::flip(img, img, 0);

            //resize(img, img, Size(160, 140), 0, 0, INTER_CUBIC);

            draw_cube(img);
        });
    });
}

OpenGLCube.h

#ifndef INCLUDED_OpenGLVIDEO_h
#define INCLUDED_OpenGLVIDEO_h

#include <opencv2/core/core.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/opencv.hpp>

using namespace cv;


GLuint texture;

void draw_cube(cv::Mat img)
{
    glGenTextures(1, &texture);

    glBindTexture(GL_TEXTURE_2D, texture);

    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);

    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, img.size().width, img.size().height,
                 0, GL_BGR, GL_UNSIGNED_BYTE, img.data);

    glEnable(GL_TEXTURE_2D);

    glBegin(GL_QUADS);
    glTexCoord2d(0.0, 1.0);
    glVertex2d(-1, 1);
    glTexCoord2d(0.0, 0.0);
    glVertex2d(-1, -1);
    glTexCoord2d(1.0, 0.0);
    glVertex2d(1, -1);
    glTexCoord2d(1.0, 1.0);
    glVertex2d(1, 1);
    glEnd();

    glDisable(GL_TEXTURE_2D);

    glDeleteTextures(1, &texture);
}

#endif 

Thanks.

glGenTextures should be called once, before the main loop, since you don't need to create a brand-new texture every frame. You can simply overwrite the previous data with glTexImage2D on the same texture object, exactly as you are doing now. Likewise, as a matter of good practice, glDeleteTextures should be called once, just before the program exits.

For example, the following should be done outside the main loop:

glGenTextures(1, &texture);

glBindTexture(GL_TEXTURE_2D, texture);

glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);

Of course, you will need to declare texture outside the main loop and the draw function, and pass it in.
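
Put together, the restructuring might look roughly like this (an untested sketch; init_texture and draw_frame are hypothetical helpers I'm introducing for illustration, not functions from your code, and the quad-drawing body is copied from your draw_cube):

// One-time setup, called once before the main loop (after the GL context exists).
GLuint init_texture() {
    GLuint tex = 0;
    glGenTextures(1, &tex);
    glBindTexture(GL_TEXTURE_2D, tex);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    return tex;
}

// Per frame: re-upload the current frame into the existing texture and draw the quad.
void draw_frame(GLuint tex, const cv::Mat &img) {
    glBindTexture(GL_TEXTURE_2D, tex);
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, img.cols, img.rows, 0,
                 GL_BGR, GL_UNSIGNED_BYTE, img.data);

    glEnable(GL_TEXTURE_2D);
    glBegin(GL_QUADS);
    glTexCoord2d(0.0, 1.0); glVertex2d(-1,  1);
    glTexCoord2d(0.0, 0.0); glVertex2d(-1, -1);
    glTexCoord2d(1.0, 0.0); glVertex2d( 1, -1);
    glTexCoord2d(1.0, 1.0); glVertex2d( 1,  1);
    glEnd();
    glDisable(GL_TEXTURE_2D);

    // Note: no glDeleteTextures here; that belongs in shutdown code.
}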

This line: cv::VideoCapture cap(0); // open the default camera

It must not be inside the main loop either. Opening the camera is expensive, so open it once at startup and only grab frames inside the loop.
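
For instance, the skeleton of the main loop could look roughly like this (again an untested sketch; init_texture and draw_frame are the hypothetical helpers from the previous snippet, and passing the frame and texture through render is just one possible way to wire it up, not how your code is currently structured):

cv::VideoCapture cap(0);          // open the default camera once, at startup
if (!cap.isOpened()) {
    std::cerr << "Could not open camera" << std::endl;
    return -1;
}

GLuint texture = init_texture();  // one-time texture setup

cv::Mat img;
while (!quit) {
    // ... handle SDL events and call ctx.update() as before ...

    cap >> img;                   // only grab a new frame inside the loop
    if (!img.empty()) {
        cv::flip(img, img, 0);
        render(display, texture, img);  // hand the frame down to draw_frame
    }

    SDL_GL_SwapWindow(window.get());
}

glDeleteTextures(1, &texture);    // clean up once, before exiting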

Thanks for your reply.