OpenCV-OpenGL互操作性

nwsw7zdq  于 2022-11-04  发布在  其他
关注(0)|答案(2)|浏览(175)

我在 Linux 下编译了支持 OpenGL 的 OpenCV 2.4.4，但我不知道如何使用 opengl_interop.hpp 中的函数（其中一些甚至没有文档记录——至少在我这个版本的文档里是这样）。查看 highgui 源码 window.cpp 中启用 OpenGL 的部分后，我发现了一些关于使用 setOpenGlContext、setOpenGlDrawCallback 和 updateWindow 这几个函数的提示，但即便是下面这段非常简单的代码我也无法正常运行：


#include <iostream>

#include <GL/gl.h>
#include <GL/glut.h>

#include <opencv2/opencv.hpp>
#include <opencv2/core/opengl_interop.hpp>

using namespace cv;

void on_opengl(void* userdata);

int main(void)
{
    VideoCapture webcam(CV_CAP_ANY);
    Mat frame;
    namedWindow("window", CV_WINDOW_OPENGL);
    setOpenGlContext("window");
    while(waitKey(30) < 0)
    {
        webcam >> frame;
        setOpenGlDrawCallback("window", on_opengl);
        imshow("window", frame);
        updateWindow("window");
    }

    return 0;
}

// OpenGL draw callback: renders a small colored cube using the fixed pipeline.
// 'userdata' is the pointer passed to setOpenGlDrawCallback (unused here).
void on_opengl(void* userdata)
{
    glLoadIdentity();

    // Move the cube away from the camera and tilt it so three faces show.
    glTranslated(0.0, 0.0, 1.0);

    glRotatef( 55, 1, 0, 0 );
    glRotatef( 45, 0, 1, 0 );
    glRotatef( 0, 0, 0, 1 );

    // Unit-cube corners: 6 faces, one quad (4 vertices) each.
    static const int cube[6][4][3] = {
        { { +1, -1, -1 }, { -1, -1, -1 }, { -1, +1, -1 }, { +1, +1, -1 } },
        { { +1, +1, -1 }, { -1, +1, -1 }, { -1, +1, +1 }, { +1, +1, +1 } },
        { { +1, -1, +1 }, { +1, -1, -1 }, { +1, +1, -1 }, { +1, +1, +1 } },
        { { -1, -1, -1 }, { -1, -1, +1 }, { -1, +1, +1 }, { -1, +1, -1 } },
        { { +1, -1, +1 }, { -1, -1, +1 }, { -1, -1, -1 }, { +1, -1, -1 } },
        { { -1, -1, +1 }, { +1, -1, +1 }, { +1, +1, +1 }, { -1, +1, +1 } }
    };

    const double scale = 0.2;   // shrink the unit cube to fit the default view

    for (int face = 0; face < 6; ++face)
    {
        // Distinct per-face color derived from the face index.
        glColor3ub( face * 20, 100 + face * 10, face * 42 );
        glBegin(GL_QUADS);
        for (int vtx = 0; vtx < 4; ++vtx)
        {
            glVertex3d(scale * cube[face][vtx][0],
                       scale * cube[face][vtx][1],
                       scale * cube[face][vtx][2]);
        }
        glEnd();
    }
}

在网络视频流中使用opengl的正确方法是什么?

jm81lzqq

jm81lzqq1#

OpenGL是为渲染图形而设计的,OpenCV是为计算机视觉而设计的。因此,我建议你在基于GL的应用程序中使用CV,而不是使用CV API来渲染、回调等。
如果你想要的只是一个简单的演示，那么你可以使用 freeGLUT 编写一个非常简单的程序，带上几个回调，由 freeGLUT 负责窗口回调和 GL 上下文的创建（GLFW 或 Qt 也可以）。在程序中，使用 cv::ogl::Texture2D 类来管理纹理对象，并用 Texture2D::copyFrom(...) 和 Texture2D::copyTo(...) 来处理设备/主机之间的内存传输。在渲染回调中，使用标准的 GL 流程绘制一个铺满窗口的矩形。虽然这个方法效率不高，但它可以工作。


# include <GL/glew.h>

# include <GL/freeglut.h>

# include <opencv2/opencv.hpp>

# include <opencv2/core/opengl_interop.hpp>

using namespace cv;

//Global vars
// GPU-resident texture shared between the video pipeline and the GL renderer;
// cv::ogl::Texture2D wraps a GL texture and offers copyFrom/copyTo transfers.
Texture2D g_img;
// GLUT timer callback stub: upload the next video frame into g_img here
// (e.g. via g_img.copyFrom(frame)) and request a redisplay.
void timer_cb( int )
{
    //...Update the content of g_img
}

// GLUT reshape callback stub: adjust glViewport / projection to the new size.
void resize_cb( int w, int h ) { /*...*/ }
// GLUT display callback — pseudo-code skeleton, NOT compilable as-is.
// Binds the cv::ogl::Texture2D and draws a textured full-screen quad,
// either with the fixed pipeline or with shaders + VBOs.
void render_cb() {
    /* ...render g_img here */
    g_img.bind();   // make the texture current for the draw calls below

# ifdef USE_FIXED_PIPELINE

//use fixed pipeline for old-school GL rendering
    glMatrixMode( GL_MODELVIEW );
    //Do some transformation
    glBegin(GL_QUADS);
        glTexCoord(...);
        glVertex**(...);
        ...
    glEnd();

# else

//use shaders and VBOs for 3.1+ GL
    glBindProgram( ... );
    glBindBuffer( ... );
    glVertexAttribPointer( ... );
    glDrawArrays( ... );

# endif

}
// Entry point (skeleton): initialize GLUT/GLEW, create the window, register
// timer_cb / resize_cb / render_cb, then hand control to the GLUT main loop.
int main( int argc, char**argv )
{
    //...init GLUT, GLEW and other stuff
    glutMainLoop();   // does not return until the window is closed
    return 0;
}

注意事项:
1.建议使用 freeGLUT 而不是 GLUT——它们是两回事。GLUT 已经过时了，而 freeGLUT 在扩展 GLUT 的同时持续支持最新的 OpenGL 版本。
2.您可能需要像 GLEW 这样的 GL 加载库（GL loading library）来获取 GL 函数指针。
3.较新的 OpenGL（3.1+）不再支持固定管线渲染，因此需要 VBO 和着色器。如果您的目标是较低版本的 GL，则需要指定上下文版本，这可以通过 glutInitContextVersion( int major, int minor ) 完成。网上有很多相关教程。

35g0bw71

35g0bw712#

我已经编写了一个完整的演示,完全做到了这一点(好处是在GPU上完成所有操作-步骤之间没有复制)
https://github.com/kallaballa/GCV/blob/main/src/camera/camera-demo.cpp
关键代码部分:

// Key excerpts from the VAAPI/OpenCL/OpenGL zero-copy camera demo.
// Flow: HW-decode a frame (VAAPI) -> color-convert into a GL-shared
// framebuffer (OpenCL) -> draw with OpenGL -> read back -> HW-encode (VAAPI).
using namespace kb;
//Initialize VAAPI/OpenCL Context and bind (activate) it
va::init();
//Initialize HW decoding using VAAPI
cv::VideoCapture cap(INPUT_FILENAME, cv::CAP_FFMPEG, {
        cv::CAP_PROP_HW_DEVICE, VA_HW_DEVICE_INDEX,
        cv::CAP_PROP_HW_ACCELERATION, cv::VIDEO_ACCELERATION_VAAPI,
        cv::CAP_PROP_HW_ACCELERATION_USE_OPENCL, 1
});

//bind the VAAPI/OpenCL context
va::bind(); // FIX: the original snippet was missing the trailing semicolon
//Initialize VP9 HW encoding using VAAPI.
cv::VideoWriter video(OUTPUT_FILENAME, cv::CAP_FFMPEG, cv::VideoWriter::fourcc('V', 'P', '9', '0'), INPUT_FPS, cv::Size(WIDTH, HEIGHT), {
    cv::VIDEOWRITER_PROP_HW_ACCELERATION, cv::VIDEO_ACCELERATION_VAAPI,
    cv::VIDEOWRITER_PROP_HW_ACCELERATION_USE_OPENCL, 1
});

//the UMat we are going to use as a framebuffer (RGBA) and the video buffer (RGB)
cv::UMat frameBuffer;
cv::UMat videoFrame;

//Initialize and bind the OpenCL context for OpenGL
gl::init();
//Acquire the framebuffer from OpenGL so we can write the video frame to it
gl::acquire_from_gl(frameBuffer);
//Bind the OpenCL context for VAAPI
va::bind();
//Decode a frame on the GPU using VAAPI
cap >> videoFrame;
//Color-conversion from RGB to BGRA into the framebuffer. (OpenCL)
//NOTE(review): the original comment said "BGR to RGBA", which contradicts
//COLOR_RGB2BGRA — the code is kept as-is, the comment now matches it.
cv::cvtColor(videoFrame, frameBuffer, cv::COLOR_RGB2BGRA);

//Bind the OpenCL Context for OpenGL
gl::bind();
//Release the frame buffer for use by OpenGL
gl::release_to_gl(frameBuffer);

//Bind the FBO that was prepared by calling gl::init()
glBindFramebuffer(GL_FRAMEBUFFER, kb::gl::frame_buf);
//OpenGL: draw a simple rotating line grid on top of the video frame
glViewport(0, 0, WIDTH , HEIGHT );
glRotatef(1, 0, 1, 0);
glClearColor(0.0f, 0.0f, 1.0f, 1.0f);
glColor3f(1.0, 1.0, 1.0);
glBegin(GL_LINES);
for (GLfloat i = -2.5; i <= 2.5; i += 0.25) {
    glVertex3f(i, 0, 2.5);
    glVertex3f(i, 0, -2.5);
    glVertex3f(2.5, 0, i);
    glVertex3f(-2.5, 0, i);
}
glEnd();
glFlush();

//Acquire the frame buffer from OpenGL for use with OpenCL
gl::acquire_from_gl(frameBuffer);
//Color-conversion from BGRA to RGB. (OpenCL)
cv::cvtColor(frameBuffer, videoFrame, cv::COLOR_BGRA2RGB);
//Bind the OpenCL context for VAAPI
va::bind();
//Encode the frame using VAAPI on the GPU.
video.write(videoFrame);

缩短的互操作函数:

namespace kb {
namespace va {
// Creates the shared VA-API <-> OpenCL execution context and caches it in
// va::context so it can be re-activated later via va::bind().
// Throws std::runtime_error if the VA display cannot be opened.
void init() {
    if (!va::open_display())
        throw std::runtime_error("Failed to open VA display for CL-VA interoperability");

    va::check_if_YUV420_available();

    // Build an OpenCL context that shares surfaces with the VA display
    // (second argument presumably enables the VA interop path — confirm
    // against cv::va_intel::ocl::initializeContextFromVA docs).
    cv::va_intel::ocl::initializeContextFromVA(va::display, true);

    // Cache the now-current OpenCL context for later bind() calls.
    va::context = cv::ocl::OpenCLExecutionContext::getCurrent();
}
// Re-activates the cached VA-API/OpenCL context on the calling thread.
void bind() {
    va::context.bind();
}
}

namespace gl {
// Sets up GL<->CL sharing and an FBO render target: creates the shared
// OpenCL context, a framebuffer with depth/stencil renderbuffer, and an
// RGBA color texture that the OpenCL side maps via convertFrom/ToGLTexture2D.
void init() {
    glewExperimental = true;   // required by GLEW for core-profile loading
    glewInit();

    // OpenCL context that shares memory with the current OpenGL context.
    cv::ogl::ocl::initializeContextFromGL();

    frame_buf = 0;
    glCheck(glGenFramebuffers(1, &frame_buf));
    glCheck(glBindFramebuffer(GL_DRAW_FRAMEBUFFER, frame_buf));

    // Depth + stencil renderbuffer backing the FBO.
    GLuint sb;
    glGenRenderbuffers(1, &sb);
    glBindRenderbuffer(GL_RENDERBUFFER, sb);
    glRenderbufferStorage(GL_RENDERBUFFER, GL_DEPTH24_STENCIL8, WIDTH, HEIGHT);

    glCheck(glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_DEPTH_STENCIL_ATTACHMENT, GL_RENDERBUFFER, sb));

    // RGBA texture used as the FBO color attachment — this is the surface
    // acquire_from_gl()/release_to_gl() hand back and forth to OpenCL.
    frame_buf_tex = new cv::ogl::Texture2D(cv::Size(WIDTH, HEIGHT), cv::ogl::Texture2D::RGBA, false);
    frame_buf_tex->bind();
    glCheck(glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, frame_buf_tex->texId(), 0));

    assert(glCheckFramebufferStatus(GL_FRAMEBUFFER) == GL_FRAMEBUFFER_COMPLETE);

    // Cache the GL-sharing OpenCL context for later bind() calls.
    gl::context = cv::ocl::OpenCLExecutionContext::getCurrent();
    initialized = true;
}
// Re-activates the cached GL-sharing OpenCL context on the calling thread.
void bind() {
    gl::context.bind();
}
// Maps the FBO's color texture into UMat 'm' for OpenCL-side processing.
void acquire_from_gl(cv::UMat &m) {
    glCheck(cv::ogl::convertFromGLTexture2D(*gl::frame_buf_tex, m));
}
// Writes UMat 'm' back into the FBO's color texture for OpenGL rendering.
void release_to_gl(cv::UMat &m) {
    glCheck(cv::ogl::convertToGLTexture2D(m, *gl::frame_buf_tex));
}
}
}

请留意该 repo 的 README，因为运行这个演示需要先构建我的 OpenCV 4.x fork（https://github.com/kallaballa/opencv/tree/GCV）。无论如何，我正在努力把这些更改合并到上游主线中。

相关问题