在《Android图形显示系统》中介绍了2D图形和3D图形的生产过程,对于视频的生产方式没有介绍,这里以Camera预览为例,介绍Camera拍摄得到的图像是如何传递到显示设备显示的。
首先来看一个Camera预览的简单示例,代码如下。
package com.sino.camera;
import androidx.annotation.NonNull;
import androidx.appcompat.app.AppCompatActivity;
import android.hardware.Camera;
import android.os.Bundle;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import java.io.IOException;
public class MainActivity extends AppCompatActivity {
    SurfaceView surfaceView;
    Camera mCamera;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        surfaceView = findViewById(R.id.surface_view);
        surfaceView.getHolder().addCallback(new SurfaceHolder.Callback() {
            @Override
            public void surfaceCreated(@NonNull SurfaceHolder holder) {
                // The surface now exists: route the camera's preview stream
                // into it, then start streaming frames.
                try {
                    mCamera.setPreviewDisplay(holder);
                } catch (IOException e) {
                    e.printStackTrace();
                }
                mCamera.startPreview();
            }

            @Override
            public void surfaceChanged(@NonNull SurfaceHolder holder, int format,
                                       int width, int height) {
                // Nothing to do: the preview target is fixed for this example.
            }

            @Override
            public void surfaceDestroyed(@NonNull SurfaceHolder holder) {
                // Stop pushing frames once the surface is gone.
                mCamera.stopPreview();
            }
        });
        // Open the default (id 0, back-facing) camera. The SurfaceHolder
        // callback above fires later, once the surface is actually created.
        mCamera = Camera.open();
        setPreviewOrientation(mCamera);
    }

    @Override
    protected void onDestroy() {
        super.onDestroy();
        // FIX: the original example never released the camera, leaking the
        // device (and blocking other apps) until process death.
        if (mCamera != null) {
            mCamera.release();
            mCamera = null;
        }
    }

    /**
     * Rotates the preview so it appears upright for the current display
     * rotation. Uses the back-facing formula, which matches Camera.open()
     * (camera id 0); a front-facing camera would also need mirror
     * compensation.
     */
    public void setPreviewOrientation(Camera camera) {
        Camera.CameraInfo info = new Camera.CameraInfo();
        Camera.getCameraInfo(0, info);
        int rotation = getWindowManager().getDefaultDisplay()
                .getRotation();
        int degree = 0;
        // FIX: the original chain dropped the `else` before the ROTATION_270
        // branch (harmless here, but inconsistent with the other branches).
        if (rotation == Surface.ROTATION_90) {
            degree = 90;
        } else if (rotation == Surface.ROTATION_180) {
            degree = 180;
        } else if (rotation == Surface.ROTATION_270) {
            degree = 270;
        }
        int orientation = (info.orientation - degree + 360) % 360;
        camera.setDisplayOrientation(orientation);
    }
}
本示例的功能也很简单,创建一个Camera对象,通过setPreviewDisplay把SurfaceHolder作为预览输出。有了SurfaceHolder相当于已经创建好了图层Surface,剩下的主要是申请缓存,把视频内容绘制到缓存里,再把缓存投递到队列里。
下面介绍setPreviewDisplay的流程,调用流程如下。
/* frameworks\base\core\java\android\hardware\Camera.java */
public class Camera {
    /**
     * Routes the preview output to the Surface backing {@code holder};
     * a null holder clears the preview target.
     */
    public final void setPreviewDisplay(SurfaceHolder holder) throws IOException {
        Surface target = (holder == null) ? null : holder.getSurface();
        setPreviewSurface(target);
    }

    // Native entry point, implemented in android_hardware_Camera.cpp.
    public native final void setPreviewSurface(Surface surface) throws IOException;
}
/* frameworks\base\core\jni\android_hardware_Camera.cpp */
// JNI bridge for Camera.setPreviewSurface(): unwraps the native Surface
// behind the Java android.view.Surface and hands its IGraphicBufferProducer
// to the native Camera client. (Excerpt: error handling elided.)
static void android_hardware_Camera_setPreviewSurface(JNIEnv *env, jobject thiz, jobject jSurface)
{
sp<Camera> camera = get_native_camera(env, thiz, NULL);
sp<IGraphicBufferProducer> gbp;
sp<Surface> surface;
if (jSurface) {
// Convert the Java Surface object into the native Surface wrapper.
surface = android_view_Surface_getSurface(env, jSurface);
if (surface != NULL) {
// The producer end of the BufferQueue is what crosses Binder.
gbp = surface->getIGraphicBufferProducer();
}
}
// A null gbp (no Surface) clears the preview target.
if (camera->setPreviewTarget(gbp) != NO_ERROR) {
// NOTE(review): failure branch is empty in this excerpt.
}
}
/* frameworks\av\camera\Camera.cpp */
// Client-side native proxy: forwards the buffer producer over Binder to the
// remote ICamera implementation hosted in CameraService.
status_t Camera::setPreviewTarget(const sp<IGraphicBufferProducer>& bufferProducer)
{
sp <::android::hardware::ICamera> c = mCamera;
return c->setPreviewTarget(bufferProducer);
}
setPreviewTarget把Surface的IGraphicBufferProducer传给了CameraService进程。接下来是CameraService的处理流程。
/*frameworks\av\services\camera\libcameraservice\api1\CameraClient.cpp */
// CameraService side: rebuilds a Surface (i.e. an ANativeWindow) around the
// producer received over Binder, then installs it as the preview window.
status_t CameraClient::setPreviewTarget(
const sp<IGraphicBufferProducer>& bufferProducer) {
sp<IBinder> binder;
sp<ANativeWindow> window;
if (bufferProducer != 0) {
binder = IInterface::asBinder(bufferProducer);
// Surface is an ANativeWindow wrapper over the producer end of the
// BufferQueue, usable for dequeue/enqueue from this process.
window = new Surface(bufferProducer, /*controlledByApp*/ true);
}
return setPreviewWindow(binder, window);
}
// Installs the new window on the HAL. (Excerpt: `result` is declared and
// returned in the full source; only the preview-already-enabled path is shown.)
status_t CameraClient::setPreviewWindow(const sp<IBinder>& binder,
const sp<ANativeWindow>& window) {
if (mHardware->previewEnabled()) {
if (window != 0) {
// Scale output buffers to the window and apply the client orientation
// before handing the window down to the HAL.
mHardware->setPreviewScalingMode(NATIVE_WINDOW_SCALING_MODE_SCALE_TO_WINDOW);
mHardware->setPreviewTransform(mOrientation);
result = mHardware->setPreviewWindow(window);
}
}
}
在CameraService中,根据GraphicBufferProducer构造出ANativeWindow对象(其实就是Surface),然后传递给HAL进程。HAL进程拿到ANativeWindow后,就可以申请图像缓存,把图像缓存传递到SurfaceFlinger处理。
// HIDL camera HAL wrapper: remembers the framework's preview callback and
// hands the legacy HAL a C-struct window (mHalPreviewWindow) whose hooks call
// back into that interface. (Excerpt: the no-ops fallback return is elided.)
Return<Status> CameraDevice::setPreviewWindow(const sp<ICameraDevicePreviewCallback>& window) {
mHalPreviewWindow.mPreviewCallback = window;
if (mDevice->ops->set_preview_window) {
// Pass nullptr through when the window is being cleared.
return getHidlStatus(mDevice->ops->set_preview_window(mDevice,
(window == nullptr) ? nullptr : &mHalPreviewWindow));
}
}
// Qualcomm HAL: entry in the camera_device_ops function table
// (excerpt — only the preview-window hook is shown).
camera_device_ops_t QCameraMuxer::mCameraMuxerOps = {
.set_preview_window = QCameraMuxer::set_preview_window,
}
// Dual-camera muxer: walks the physical cameras and attaches the preview
// window to the one in primary mode. (Excerpt: `cam`, `pCam`, `rc` and the
// surrounding setup are elided from the full source.)
int QCameraMuxer::set_preview_window(struct camera_device * device,
struct preview_stream_ops *window)
{
for (uint32_t i = 0; i < cam->numCameras; i++) {
pCam = gMuxer->getPhysicalCamera(cam, i);
// Only the primary camera renders preview frames.
if (pCam->mode == CAM_MODE_PRIMARY) {
QCamera2HardwareInterface *hwi = pCam->hwi;
rc = hwi->set_preview_window(pCam->dev, window);
}
}
return rc;
}
// Forwards the window to the HAL's state machine as an API event instead of
// acting on it inline. (Excerpt: `rc` declaration and return are elided.)
int QCamera2HardwareInterface::set_preview_window(struct camera_device *device,
struct preview_stream_ops *window)
{
QCamera2HardwareInterface *hw =
reinterpret_cast<QCamera2HardwareInterface *>(device->priv);
rc = hw->processAPI(QCAMERA_SM_EVT_SET_PREVIEW_WINDOW, (void *)window);
}
// Wires the C-struct preview window seen by the legacy HAL to the static
// hooks below, so the HAL's dequeue/enqueue calls cross back into the
// framework. (Excerpt: the other function pointers are elided.)
void CameraDevice::initHalPreviewWindow()
{
mHalPreviewWindow.dequeue_buffer = sDequeueBuffer;
mHalPreviewWindow.enqueue_buffer = sEnqueueBuffer;
}
// dequeue_buffer hook: asks the framework-side preview callback for a free
// buffer and resolves the returned id to a cached buffer_handle_t.
// (Excerpt: the return value is elided.)
int CameraDevice::sDequeueBuffer(struct preview_stream_ops* w,
buffer_handle_t** buffer, int *stride) {
CameraPreviewWindow* object = static_cast<CameraPreviewWindow*>(w);
Status s;
// Synchronous HIDL call; results are delivered through the lambda.
object->mPreviewCallback->dequeueBuffer(
[&](auto status, uint64_t bufferId, const auto& buf, uint32_t strd) {
s = status;
if (s == Status::OK) {
// Look the native handle up in the local cache keyed by bufferId.
*buffer = &(object->mCirculatingBuffers[bufferId]);
*stride = strd;
}
});
}
// Preview setup entry point: creates the preview channel.
// (Excerpt: `rc` declaration and return are elided.)
int32_t QCamera2HardwareInterface::preparePreview()
{
rc = addChannel(QCAMERA_CH_TYPE_PREVIEW);
}
// Dispatches channel creation by type; only the preview case is shown in
// this excerpt.
int32_t QCamera2HardwareInterface::addChannel(qcamera_ch_type_enum_t ch_type)
{
switch (ch_type) {
case QCAMERA_CH_TYPE_PREVIEW:
rc = addPreviewChannel();
break;
}
}
// Adds the preview stream to the channel and registers
// preview_stream_cb_routine as its per-frame callback. (Excerpt.)
int32_t QCamera2HardwareInterface::addPreviewChannel()
{
rc = addStreamToChannel(pChannel, CAM_STREAM_TYPE_PREVIEW,
preview_stream_cb_routine, this);
}
// Per-frame preview callback: pushes the filled buffer to the window
// (enqueue) and immediately pulls a fresh one for the next frame (dequeue).
// (Excerpt: `memory`, `idx`, `err` and the frame bookkeeping are elided.)
void QCamera2HardwareInterface::preview_stream_cb_routine(mm_camera_super_buf_t *super_frame,
QCameraStream * stream,
void *userdata)
{
err = memory->enqueueBuffer(idx);
int dequeuedIdx = memory->dequeueBuffer();
}
// Pulls a free buffer from the ANativeWindow; per the surrounding text this
// ends up in Surface::dequeueBuffer. (Excerpt: locals and index mapping
// are elided.)
int32_t QCameraGrallocMemory::dequeueBuffer()
{
err = mWindow->dequeue_buffer(mWindow, &buffer_handle, &stride);
return dequeuedIdx;
}
// Queues the filled buffer to the ANativeWindow; per the surrounding text
// this ends up in Surface::queueBuffer, handing the frame to the
// BufferQueue consumer. (Excerpt: `err` and return are elided.)
int32_t QCameraGrallocMemory::enqueueBuffer(uint32_t index, nsecs_t timeStamp)
{
err = mWindow->enqueue_buffer(mWindow, (buffer_handle_t *)mBufferHandle[index]);
}
在HAL进程中,调用ANativeWindow的dequeue_buffer申请缓存,调用enqueue_buffer投递缓存。根据前面对eglSwapBuffers的介绍已经知道,enqueue_buffer最终会调到Surface的queueBuffer,dequeue_buffer会调到Surface的dequeueBuffer。