Merge "media: refine kMetadataBufferTypes" into mnc-dev

This commit is contained in:
Lajos Molnar 2015-06-02 03:32:00 +00:00 committed by Android (Google) Code Review
commit da7cbd8c7c
2 changed files with 64 additions and 29 deletions

View File

@ -52,9 +52,9 @@ struct EnableAndroidNativeBuffersParams {
OMX_BOOL enable; OMX_BOOL enable;
}; };
// A pointer to this struct is passed to OMX_SetParameter() when the extension // A pointer to this struct is passed to OMX_SetParameter() when the extension index
// index "OMX.google.android.index.storeMetaDataInBuffers" // "OMX.google.android.index.storeMetaDataInBuffers" or
// is given. // "OMX.google.android.index.storeANWBufferInMetadata" is given.
// //
// When meta data is stored in the video buffers passed between OMX clients // When meta data is stored in the video buffers passed between OMX clients
// and OMX components, interpretation of the buffer data is up to the // and OMX components, interpretation of the buffer data is up to the
@ -62,19 +62,33 @@ struct EnableAndroidNativeBuffersParams {
// some information helpful for the receiver to locate the actual data. // some information helpful for the receiver to locate the actual data.
// The buffer receiver thus needs to know how to interpret what is stored // The buffer receiver thus needs to know how to interpret what is stored
// in these buffers, with mechanisms pre-determined externally. How to // in these buffers, with mechanisms pre-determined externally. How to
// interpret the meta data is outside of the scope of this method. // interpret the meta data is outside of the scope of this parameter.
// //
// Currently, this is specifically used to pass meta data from video source // Currently, this is used to pass meta data from video source (camera component, for instance) to
// (camera component, for instance) to video encoder to avoid memcpying of // video encoder to avoid memcpying of input video frame data, as well as to pass dynamic output
// input video frame data. To do this, bStoreMetaData is set to OMX_TRUE. // buffers to the video decoder. To do this, bStoreMetaData is set to OMX_TRUE.
// If bStoreMetaData is set to false, real YUV frame data will be stored
// in the buffers. In addition, if no OMX_SetParameter() call is made
// with the corresponding extension index, real YUV data is stored
// in the buffers.
// //
// For video decoder output port, the metadata buffer layout is defined below. // If bStoreMetaData is set to false, real YUV frame data will be stored in input buffers, and
// the output buffers contain either real YUV frame data, or are themselves native handles as
// directed by enable/use-android-native-buffer parameter settings.
// In addition, if no OMX_SetParameter() call is made on a port with the corresponding extension
// index, the component must not assume that the client has disabled metadata mode for that port.
// //
// Metadata buffers are registered with the component using UseBuffer calls. // If the component supports this using the "OMX.google.android.index.storeANWBufferInMetadata"
// extension and bStoreMetaData is set to OMX_TRUE, data is passed using the VideoNativeMetadata
// layout as defined below. Each buffer will be accompanied by a fence. The fence must signal
// before the buffer can be used (e.g. read from or written into). When returning such buffer to
// the client, component must provide a new fence that must signal before the returned buffer can
// be used (e.g. read from or written into). The component owns the incoming fenceFd, and must close
// it once the fence has signaled. The client will own and close the returned fence file descriptor.
//
// If the component supports this using the "OMX.google.android.index.storeMetaDataInBuffers"
// extension and bStoreMetaData is set to OMX_TRUE, data is passed using VideoGrallocMetadata
// (the layout of which is the VideoGrallocMetadata defined below). Camera input can also be passed
// as "CameraSource", the layout of which is vendor dependent.
//
// Metadata buffers are registered with the component using UseBuffer calls, or can be allocated
// by the component for encoder-metadata-output buffers.
struct StoreMetaDataInBuffersParams { struct StoreMetaDataInBuffersParams {
OMX_U32 nSize; OMX_U32 nSize;
OMX_VERSIONTYPE nVersion; OMX_VERSIONTYPE nVersion;
@ -84,9 +98,15 @@ struct StoreMetaDataInBuffersParams {
// Meta data buffer layout used to transport output frames to the decoder for // Meta data buffer layout used to transport output frames to the decoder for
// dynamic buffer handling. // dynamic buffer handling.
struct VideoDecoderOutputMetaData { struct VideoGrallocMetadata {
MetadataBufferType eType; MetadataBufferType eType; // must be kMetadataBufferTypeGrallocSource
buffer_handle_t pHandle; buffer_handle_t hHandle;
};
struct VideoNativeMetadata {
MetadataBufferType eType; // must be kMetadataBufferTypeANWBuffer
struct ANativeWindowBuffer* pBuffer;
int nFenceFd; // -1 if unused
}; };
// A pointer to this struct is passed to OMX_SetParameter() when the extension // A pointer to this struct is passed to OMX_SetParameter() when the extension

View File

@ -77,28 +77,43 @@ typedef enum {
* GRalloc buffer. The encoder needs to interpret this GRalloc handle * GRalloc buffer. The encoder needs to interpret this GRalloc handle
* and encode the frames. * and encode the frames.
* -------------------------------------------------------------- * --------------------------------------------------------------
* | kMetadataBufferTypeGrallocSource | sizeof(buffer_handle_t) | * | kMetadataBufferTypeGrallocSource | buffer_handle_t buffer |
* -------------------------------------------------------------- * --------------------------------------------------------------
*
* See the VideoGrallocMetadata structure.
*/ */
kMetadataBufferTypeGrallocSource = 1, kMetadataBufferTypeGrallocSource = 1,
/* /*
* kMetadataBufferTypeGraphicBuffer is used to indicate that * kMetadataBufferTypeGraphicBuffer is used to indicate that
* the payload of the metadata buffers can be interpreted as * the payload of the metadata buffers can be interpreted as
* a GraphicBuffer. It is only to be used by software encoders. * an ANativeWindowBuffer, and that a fence is provided.
* In this case, the metadata that the encoder receives *
* will have a byte stream that consists of two parts: * In this case, the metadata will have a byte stream that consists of three parts:
* 1. First, there is an integer indicating that the metadata * 1. First, there is an integer indicating that the metadata
* contains a GraphicBuffer (kMetadataBufferTypeGraphicBuffer) * contains an ANativeWindowBuffer (kMetadataBufferTypeANWBuffer)
* 2. This is followed by the pointer to the GraphicBuffer that * 2. This is followed by the pointer to the ANativeWindowBuffer.
* is to be encoded. Encoder must not create a sp<> from this * Codec must not free this buffer as it does not actually own this buffer.
* graphic buffer, or free it, as it does not actually own this * 3. Finally, there is an integer containing a fence file descriptor.
* buffer. * The codec must wait on the fence before encoding or decoding into this
* -------------------------------------------------------------- * buffer. When the buffer is returned, codec must replace this file descriptor
* | kMetadataBufferTypeGraphicBuffer | sizeof(GraphicBuffer *) | * with a new fence that will be waited on before the buffer is replaced
* -------------------------------------------------------------- * (encoder) or read (decoder).
* ---------------------------------
* | kMetadataBufferTypeANWBuffer |
* ---------------------------------
* | ANativeWindowBuffer *buffer |
* ---------------------------------
* | int fenceFd |
* ---------------------------------
*
* See the VideoNativeMetadata structure.
*/ */
kMetadataBufferTypeGraphicBuffer = 2, kMetadataBufferTypeANWBuffer = 2,
/* This value is used by the framework, but is never used inside a metadata buffer */
kMetadataBufferTypeInvalid = -1,
// Add more here... // Add more here...