Remove deprecated BufferQueue constructor
Bug: 13415624
Change-Id: I1c17833511b5961af5a9bbb9cc3d627ce558d424
parent d105876a6c
commit 6780a2d6a5
@@ -201,14 +201,16 @@ bool GLHelper::getShaderProgram(const char* name, GLuint* outPgm) {
 
 bool GLHelper::createNamedSurfaceTexture(GLuint name, uint32_t w, uint32_t h,
         sp<GLConsumer>* glConsumer, EGLSurface* surface) {
-    sp<BufferQueue> bq = new BufferQueue(mGraphicBufferAlloc);
-    sp<GLConsumer> glc = new GLConsumer(bq, name,
+    sp<IGraphicBufferProducer> producer;
+    sp<IGraphicBufferConsumer> consumer;
+    BufferQueue::createBufferQueue(&producer, &consumer, mGraphicBufferAlloc);
+    sp<GLConsumer> glc = new GLConsumer(consumer, name,
             GL_TEXTURE_EXTERNAL_OES, false);
     glc->setDefaultBufferSize(w, h);
     glc->setDefaultMaxBufferCount(3);
     glc->setConsumerUsageBits(GRALLOC_USAGE_HW_COMPOSER);
 
-    sp<ANativeWindow> anw = new Surface(bq);
+    sp<ANativeWindow> anw = new Surface(producer);
     EGLSurface s = eglCreateWindowSurface(mDisplay, mConfig, anw.get(), NULL);
     if (s == EGL_NO_SURFACE) {
         fprintf(stderr, "eglCreateWindowSurface error: %#x\n", eglGetError());
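
For context, a minimal sketch of the factory-based setup this change migrates to, assuming an Android build environment with libgui and GLES headers; the helper name and the omitted allocator argument are illustrative and not part of this change:

#include <GLES2/gl2.h>
#include <GLES2/gl2ext.h>
#include <gui/BufferQueue.h>
#include <gui/GLConsumer.h>
#include <gui/Surface.h>

using namespace android;

// Hypothetical helper showing the pattern used in the diff above.
static void createConsumerAndWindow(GLuint texName,
        sp<GLConsumer>* outConsumer, sp<ANativeWindow>* outWindow) {
    // The deprecated "new BufferQueue(...)" constructor is replaced by the
    // static factory, which hands back the producer and consumer endpoints.
    sp<IGraphicBufferProducer> producer;
    sp<IGraphicBufferConsumer> consumer;
    BufferQueue::createBufferQueue(&producer, &consumer);

    // The consumer end drives the GLConsumer bound to the given texture name.
    *outConsumer = new GLConsumer(consumer, texName, GL_TEXTURE_EXTERNAL_OES,
            false);

    // The producer end backs the ANativeWindow handed to EGL.
    *outWindow = new Surface(producer);
}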