Remove deprecated BufferQueue constructor

Bug: 13415624
Change-Id: I1c17833511b5961af5a9bbb9cc3d627ce558d424
Author: Dan Stoza
Date:   2014-03-13 11:31:43 -07:00
Parent: d105876a6c
Commit: 6780a2d6a5


@@ -201,14 +201,16 @@ bool GLHelper::getShaderProgram(const char* name, GLuint* outPgm) {
 bool GLHelper::createNamedSurfaceTexture(GLuint name, uint32_t w, uint32_t h,
         sp<GLConsumer>* glConsumer, EGLSurface* surface) {
-    sp<BufferQueue> bq = new BufferQueue(mGraphicBufferAlloc);
-    sp<GLConsumer> glc = new GLConsumer(bq, name,
+    sp<IGraphicBufferProducer> producer;
+    sp<IGraphicBufferConsumer> consumer;
+    BufferQueue::createBufferQueue(&producer, &consumer, mGraphicBufferAlloc);
+    sp<GLConsumer> glc = new GLConsumer(consumer, name,
             GL_TEXTURE_EXTERNAL_OES, false);
     glc->setDefaultBufferSize(w, h);
     glc->setDefaultMaxBufferCount(3);
     glc->setConsumerUsageBits(GRALLOC_USAGE_HW_COMPOSER);
-    sp<ANativeWindow> anw = new Surface(bq);
+    sp<ANativeWindow> anw = new Surface(producer);
     EGLSurface s = eglCreateWindowSurface(mDisplay, mConfig, anw.get(), NULL);
     if (s == EGL_NO_SURFACE) {
         fprintf(stderr, "eglCreateWindowSurface error: %#x\n", eglGetError());
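
For readers migrating their own code, here is a minimal sketch of the same pattern in isolation, assuming 2014-era frameworks/native headers; the function name and parameters below are illustrative and not part of this commit:

// Sketch only: replacing the deprecated BufferQueue constructor with
// BufferQueue::createBufferQueue(), which hands back the producer and
// consumer ends of the queue as separate interfaces.
#include <GLES2/gl2.h>
#include <GLES2/gl2ext.h>
#include <gui/BufferQueue.h>
#include <gui/GLConsumer.h>
#include <gui/IGraphicBufferAlloc.h>
#include <gui/Surface.h>

using namespace android;

static void makeTextureSurface(const sp<IGraphicBufferAlloc>& alloc,
        GLuint texName, sp<GLConsumer>* outConsumer,
        sp<ANativeWindow>* outWindow) {
    // Old (removed by this commit): one BufferQueue object served as
    // both ends of the queue:
    //   sp<BufferQueue> bq = new BufferQueue(alloc);

    // New: ask BufferQueue for the two ends explicitly.
    sp<IGraphicBufferProducer> producer;
    sp<IGraphicBufferConsumer> consumer;
    BufferQueue::createBufferQueue(&producer, &consumer, alloc);

    // The consumer end feeds the GLConsumer (SurfaceTexture)...
    *outConsumer = new GLConsumer(consumer, texName,
            GL_TEXTURE_EXTERNAL_OES, false);
    // ...and the producer end backs the window handed to EGL.
    *outWindow = new Surface(producer);
}

Splitting the queue into explicit producer and consumer interfaces mirrors the diff above: Surface takes only the producer end and GLConsumer only the consumer end, rather than both sharing a single BufferQueue object.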