
Qt QQuickFramebufferObject OpenGL rendering invades other objects' scenes and is red

I know that a minimal, complete, and verifiable example is generally needed, but this example cannot be reduced any further because too many libraries are needed to fetch and decode the video from my security camera. I hope someone can help me by pointing out possible simple mistakes in the rendering with QQuickFramebufferObject.

(screenshot: the four camera feeds, all tinted red, with one feed bleeding into the others)

The two problems I'm having are that the images are RED and that the image from one object invades the space of the other objects in a very inexplicable way. In the image above you can see what should be 4 different camera feeds, which are instantiated through QML.

Here's the class that renders everything. YUV420P data is fed in through the updateData() function. You can see the simple shader that converts YUV420P to RGB. Each QML object (camera stream) is one instance of this class.

OpenGlBufferQtQuick.cpp :

#include "OpenGlBufferQtQuick.h"
#include <QOpenGLFramebufferObjectFormat>
#include <QRunnable>
#include <QEventLoop>
#include <QNetworkReply>
#include <QNetworkRequest>
#include <QMutexLocker>
#include <memory>
#include <iostream>
#include <QTimer>

#define GET_STR(x) #x
#define A_VER 3
#define T_VER 4

static const GLfloat ver[] = {
    -1.0f,-1.0f,
     1.0f,-1.0f,
    -1.0f, 1.0f,
     1.0f, 1.0f
};

static const GLfloat tex[] = {
    0.0f, 1.0f,
    1.0f, 1.0f,
    0.0f, 0.0f,
    1.0f, 0.0f
};
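
//Full-screen quad as a triangle strip. The texture coordinates are flipped
//vertically (t = 1 on the bottom vertices) so the first row of the uploaded
//frame ends up at the top of the item.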

//Simple vertex shader. Outputs the same position as the input, I guess
const char *vString3 = GET_STR(
    attribute vec4 vertexIn;
    attribute vec2 textureIn;
    varying vec2 textureOut;
    uniform mat4 u_transform;   
    void main(void)
    {
        gl_Position = u_transform * vertexIn;
        textureOut = textureIn;
    }
);


//The matrix below does YUV420P to RGB conversion https://en.wikipedia.org/wiki/YUV#Y%E2%80%B2UV420p_(and_Y%E2%80%B2V12_or_YV12)_to_RGB888_conversion
//This fragment shader converts each pixel from YUV to RGB (I guess)
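//Per channel, with U and V re-centered around zero, the matrix below computes:
//  R = Y + 1.13983 * V
//  G = Y - 0.39465 * U - 0.58060 * V
//  B = Y + 2.03211 * U
//(GLSL mat3 constructors are column-major, so the numbers are listed column by
//column, not row by row.)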
const char *tString3 = GET_STR(
    varying vec2 textureOut;
    uniform sampler2D tex_y;
    uniform sampler2D tex_u;
    uniform sampler2D tex_v;
    void main(void)
    {
        vec3 yuv;
        vec3 rgb;
        yuv.x = texture2D(tex_y, textureOut).r;
        yuv.y = texture2D(tex_u, textureOut).r - 0.5;
        yuv.z = texture2D(tex_v, textureOut).r - 0.5;
        rgb = mat3(1.0, 1.0, 1.0,
            0.0, -0.39465, 2.03211,
            1.13983, -0.58060, 0.0) * yuv;
        gl_FragColor = vec4(rgb, 1.0);
    }

);


OpenGlBufferItemRenderer::OpenGlBufferItemRenderer(string uri){
    this->uri = uri;
}


void OpenGlBufferItemRenderer::render() {
    QOpenGLFunctions *f = QOpenGLContext::currentContext()->functions();
    if (firstFrameReceived) {
        if (this->firstRender) {
            std::cout << "Creating QOpenGLShaderProgram " << std::endl;
            program = new QOpenGLShaderProgram();
            f->initializeOpenGLFunctions();
            //this->m_F  = QOpenGLContext::currentContext()->functions();
            std::cout << "frameWidth: " << frameWidth << + " frameHeight: " << frameHeight << std::endl;

            std::cout << "Fragment Shader compilation: " << program->addShaderFromSourceCode(QOpenGLShader::Fragment, tString3) << std::endl;
            std::cout << "Vertex Shader compilation: " << program->addShaderFromSourceCode(QOpenGLShader::Vertex, vString3) << std::endl;

            program->bindAttributeLocation("vertexIn",A_VER);
            program->bindAttributeLocation("textureIn",T_VER);
            std::cout << "program->link() = " << program->link() << std::endl;

            f->glGenTextures(3, texs);//TODO: ERASE THIS WITH glDeleteTextures
            this->firstRender = false;
        }

        // Not strictly needed for this example, but generally useful for when
        // mixing with raw OpenGL.
        //m_window->resetOpenGLState();//COMMENT OR NOT?
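        //(In Qt 5, QQuickWindow::resetOpenGLState() resets the context back to the
        //state the Qt Quick scene graph expects after custom OpenGL code has run.)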

        program->bind();

        QMatrix4x4 transform;
        transform.setToIdentity();
        program->setUniformValue("u_transform", transform);

        f->glVertexAttribPointer(A_VER, 2, GL_FLOAT, 0, 0, ver);
        f->glEnableVertexAttribArray(A_VER);

        f->glVertexAttribPointer(T_VER, 2, GL_FLOAT, 0, 0, tex);
        f->glEnableVertexAttribArray(T_VER);

        unis[0] = program->uniformLocation("tex_y");
        unis[1] = program->uniformLocation("tex_u");
        unis[2] = program->uniformLocation("tex_v");

        //Y
        f->glBindTexture(GL_TEXTURE_2D, texs[0]);
        f->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
        f->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
        f->glTexImage2D(GL_TEXTURE_2D, 0, GL_RED, frameWidth, frameHeight, 0, GL_RED, GL_UNSIGNED_BYTE, 0);
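        //The null data pointer above only (re)allocates storage for the texture on
        //every render pass; the actual pixels are uploaded further down with
        //glTexSubImage2D. The same applies to the U and V planes below.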

        //U
        f->glBindTexture(GL_TEXTURE_2D, texs[1]);
        f->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
        f->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
        f->glTexImage2D(GL_TEXTURE_2D, 0, GL_RED, frameWidth/2, frameHeight / 2, 0, GL_RED, GL_UNSIGNED_BYTE, 0);

        //V
        f->glBindTexture(GL_TEXTURE_2D, texs[2]);
        f->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
        f->glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
        f->glTexImage2D(GL_TEXTURE_2D, 0, GL_RED, frameWidth / 2, frameHeight / 2, 0, GL_RED, GL_UNSIGNED_BYTE, 0);

        f->glActiveTexture(GL_TEXTURE0);
        f->glBindTexture(GL_TEXTURE_2D, texs[0]);
        f->glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, frameWidth, frameHeight, GL_RED, GL_UNSIGNED_BYTE, datas[0]);
        f->glUniform1i(unis[0], 0);


        f->glActiveTexture(GL_TEXTURE0+1);
        f->glBindTexture(GL_TEXTURE_2D, texs[1]); 
        f->glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, frameWidth/2, frameHeight / 2, GL_RED, GL_UNSIGNED_BYTE, datas[1]);
        f->glUniform1i(unis[1],1);


        f->glActiveTexture(GL_TEXTURE0+2);
        f->glBindTexture(GL_TEXTURE_2D, texs[2]);
        f->glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, frameWidth / 2, frameHeight / 2, GL_RED, GL_UNSIGNED_BYTE, datas[2]);
        f->glUniform1i(unis[2], 2);

        f->glDrawArrays(GL_TRIANGLE_STRIP,0,4);

        program->disableAttributeArray(A_VER);
        program->disableAttributeArray(T_VER);
        program->release();

    } 
    update();
}

QOpenGLFramebufferObject *OpenGlBufferItemRenderer::createFramebufferObject(const QSize &size)
{
    QOpenGLFramebufferObjectFormat format;
    format.setAttachment(QOpenGLFramebufferObject::CombinedDepthStencil);
    //format.setSamples(16);
    return new QOpenGLFramebufferObject(size, format);
}
//https://blog.qt.io/blog/2015/05/11/integrating-custom-opengl-rendering-with-qt-quick-via-qquickframebufferobject/
void OpenGlBufferItemRenderer::synchronize(QQuickFramebufferObject *item)
{
    OpenGlBufferItem *openGlBufferItem = static_cast<OpenGlBufferItem*>(item);

    std::cout << "synchronize called " << std::endl;
    std::cout << "starting new renderer for uri " << this-> uri << std::endl;

    MediaStream* camera1 = new MediaStream(this->uri);
    camera1->setFrameUpdater((FrameUpdater *) this);
    //TODO: put mutex on std::cout of this thread
    //TODO: make this actually run here instead of on a separate thread, I guess.
    boost::thread mediaThread(&MediaStream::run, camera1);
}

OpenGlBufferItem::OpenGlBufferItem(){}

void OpenGlBufferItemRenderer::updateData(unsigned char**data, int frameWidth, int frameHeight)
{
    this->frameWidth = frameWidth;
    this->frameHeight = frameHeight;
    //Before the first frame arrives, the datas buffers haven't been allocated yet
    if (!firstFrameReceived) {
        datas[0] = new unsigned char[frameWidth*frameHeight];   //Y
        datas[1] = new unsigned char[frameWidth*frameHeight/4]; //U
        datas[2] = new unsigned char[frameWidth*frameHeight/4]; //V
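        //Note: on this very first call only the buffers are allocated; the incoming
        //frame's pixels are not copied, so they are only drawn from the second
        //frame onward.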
        firstFrameReceived = true;
    } else {
        memcpy(datas[0], data[0], frameWidth*frameHeight);
        memcpy(datas[1], data[1], frameWidth*frameHeight/4);
        memcpy(datas[2], data[2], frameWidth*frameHeight/4);
    }
}

QQuickFramebufferObject::Renderer *OpenGlBufferItem::createRenderer() const
{
    //std::cout << "createRenderer called ------------------------" << std::endl;
    return new OpenGlBufferItemRenderer(this->uri);
}

Here's main.qml :

import QtQuick 2.0
import OpenGlBufferQtQuick 1.0

Grid {
    columns: 2
    spacing: 2    
    width: 1280
    height: 720
    OpenGlBufferQtQuick {
        width: 640
        height: 360
        uri: "rtsp://admin:123456@192.168.0.103:10554/tcp/av0_0"
    }
    OpenGlBufferQtQuick {
        width: 640
        height: 360
        uri: "rtsp://admin:123456@192.168.0.101:10554/tcp/av0_0"
    }
    OpenGlBufferQtQuick {
        width: 640
        height: 360
        uri: "rtsp://admin:123456@192.168.0.104:10554/tcp/av0_0"
    }
    OpenGlBufferQtQuick {
        width: 640
        height: 360
        uri: "rtsp://admin:123456@192.168.1.43:10554/tcp/av0_0"
    }

}

As you can see, I'm calling 4 different camera streams, but the stream from the first camera invades the space of the other streams, even though each stream is a completely different object.

Also, the image is RED. I used almost the same code to render with a class declared as OpenGlVideoQtQuickRenderer : public QObject, protected QOpenGLFunctions, and it works without any red tint or the OpenGL output invading the other items' space.

I paid someone to help me, and the problem was that the active texture unit wasn't being saved and restored. These are the changes:

https://github.com/lucaszanella/orwell/commit/b2882768badb16e4334bc2bd0371611221283e97#diff-b089e4d46edc159fb6e7de932e64219b

Basically:

GLint originTextureUnit;
f->glGetIntegerv(GL_ACTIVE_TEXTURE, &originTextureUnit);

and

f->glActiveTexture(originTextureUnit);
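
For context, here is a sketch of where those two pieces end up in render() (my reconstruction, not the literal diff; the commit linked above has the real change). All items in the window render through the same OpenGL context, so the idea is to read whatever texture unit was active before this renderer touches the textures and to restore it after drawing, instead of leaving GL_TEXTURE2 active for Qt Quick or for the next OpenGlBufferQtQuick item:

void OpenGlBufferItemRenderer::render() {
    QOpenGLFunctions *f = QOpenGLContext::currentContext()->functions();
    if (firstFrameReceived) {
        //... one-time shader/texture setup exactly as in the code above ...

        //Remember which texture unit was active when render() was entered
        GLint originTextureUnit;
        f->glGetIntegerv(GL_ACTIVE_TEXTURE, &originTextureUnit);

        program->bind();
        //... upload the Y/U/V planes on GL_TEXTURE0..GL_TEXTURE2 and draw, as above ...

        //Restore the previously active unit so the change doesn't leak into
        //the scene graph or into the other OpenGlBufferQtQuick items
        f->glActiveTexture(static_cast<GLenum>(originTextureUnit));
        program->release();
    }
    update();
}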

However, I still don't understand why the active texture unit was wrong in the first place.
