Preface
The project needs to render camera preview frames. The camera delivers YUV420SP (NV21) data, which is laid out as a full-resolution Y plane followed by a half-resolution interleaved chroma plane: for a w×h frame, w×h bytes of Y, then w×h/2 bytes of interleaved V/U pairs, with each 2×2 block of pixels sharing one pair.
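For reference, the byte offsets in a tightly packed NV21 buffer work out as below (a minimal sketch; x/y are pixel coordinates, and real camera buffers may add row padding):

#include <cstddef>

// Offset of the Y byte for pixel (x, y) in a w*h NV21 frame.
static inline size_t nv21YIndex(int x, int y, int w) {
    return (size_t) y * w + x; // one Y byte per pixel
}

// Offset of the V byte for pixel (x, y); U follows at +1 (NV21 stores V first).
static inline size_t nv21VUIndex(int x, int y, int w, int h) {
    // chroma starts right after the Y plane; each 2x2 pixel block shares one V/U pair
    return (size_t) w * h + (y / 2) * w + (x / 2) * 2;
}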

Rendering to a SurfaceView requires converting to RGB first, which is very slow. Using the YUV data directly as a texture and rendering it with a custom shader is far more efficient: on a Snapdragon 870 (Redmi K40) it cut CPU usage by 50% (out of the 800% total across all cores).
The demo currently uses the fairly obscure Irrlicht rendering engine, but the logic is identical with plain OpenGL.
[Important] Filling the texture with frame data must be done on the render thread.
The Android Image API
yuvReader = ImageReader.newInstance(1280, 720, ImageFormat.YUV_420_888, 10);
yuvReader.setOnImageAvailableListener(new ImageReader.OnImageAvailableListener() {
    @Override
    public void onImageAvailable(ImageReader reader) {
        Image image = reader.acquireLatestImage();
        if (image == null) {
            return;
        }
        ByteBuffer[] imageBuffers = new ByteBuffer[image.getPlanes().length];
        for (int i = 0; i < image.getPlanes().length; i++) {
            imageBuffers[i] = image.getPlanes()[i].getBuffer();
        }
        Irrlicht.nativeUpdateImage(imageBuffers, 1280, 720, 1);
        image.close();
    }
}, imageReadHandler);
The code above yields a ByteBuffer[] holding the YUV data split into three planes: the first plane is Y and the second is interleaved UV. Pay special attention here: [the third plane is interleaved VU, and its start address is the second plane's start address + 1]. In practice only the first two planes are needed.
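To sanity-check that overlap, you can compare the plane base addresses on the native side (a hedged sketch; camera planes are direct ByteBuffers, so GetDirectBufferAddress works on them, but the exact layout is device-dependent):

jobject plane1 = env->GetObjectArrayElement(image_buffer, 1);
jobject plane2 = env->GetObjectArrayElement(image_buffer, 2);
auto *uvAddr = static_cast<uint8_t *>(env->GetDirectBufferAddress(plane1));
auto *vuAddr = static_cast<uint8_t *>(env->GetDirectBufferAddress(plane2));
// on devices with this semi-planar layout the two planes alias, one byte apart
bool aliased = (vuAddr == uvAddr + 1);
env->DeleteLocalRef(plane1);
env->DeleteLocalRef(plane2);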
Fetching the YUV data in native code:
// Y plane
jobject bufferObj = env->GetObjectArrayElement(image_buffer, 0);
if (yData == nullptr) {
    yData = static_cast<uint8_t *>(malloc(h * w));
}
memcpy(yData, env->GetDirectBufferAddress(bufferObj), h * w);
env->DeleteLocalRef(bufferObj);
// interleaved UV plane
bufferObj = env->GetObjectArrayElement(image_buffer, 1);
if (uvData == nullptr) {
    uvData = static_cast<uint8_t *>(malloc(h * w / 2));
}
memcpy(uvData, env->GetDirectBufferAddress(bufferObj), h * w / 2);
env->DeleteLocalRef(bufferObj);
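One caveat: the memcpy above assumes both planes are tightly packed (rowStride equal to the width), which happens to hold for 1280×720 here. For arbitrary resolutions the Java side would also have to pass down Plane.getRowStride(), and the copy becomes row by row; a sketch, with rowStride as a hypothetical extra parameter:

// rowStride is assumed to come from image.getPlanes()[0].getRowStride()
auto *src = static_cast<uint8_t *>(env->GetDirectBufferAddress(bufferObj));
for (int row = 0; row < h; ++row) {
    memcpy(yData + row * w, src + row * rowStride, w); // copy only the visible w bytes per row
}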
If you instead convert to RGB with OpenCV:
jobject bufferObj = env->GetObjectArrayElement(image_buffer, 0);
cv::Mat y_plane(h, w, CV_8UC1, env->GetDirectBufferAddress(bufferObj));
env->DeleteLocalRef(bufferObj);
bufferObj = env->GetObjectArrayElement(image_buffer, 1);
cv::Mat uv_plane(h / 2, w / 2, CV_8UC2, env->GetDirectBufferAddress(bufferObj));
env->DeleteLocalRef(bufferObj);
cv::cvtColorTwoPlane(y_plane, uv_plane, cameraRGBMat, cv::COLOR_YUV2BGR_NV21);
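To put a number on how expensive this path is on your own device, the conversion can be timed with std::chrono (a throwaway sketch, not part of the demo):

#include <chrono>
auto t0 = std::chrono::steady_clock::now();
cv::cvtColorTwoPlane(y_plane, uv_plane, cameraRGBMat, cv::COLOR_YUV2BGR_NV21);
auto t1 = std::chrono::steady_clock::now();
// log this per frame; the conversion is the per-frame cost the shader approach removes
long long us = std::chrono::duration_cast<std::chrono::microseconds>(t1 - t0).count();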
The RGB data can then be rendered directly as a 2D image:
imageTexture = driver->addTexture(core::dimension2d<u32>(cameraWidth, cameraHeight), "$cameraImage");
auto *frame_buf = cameraRGBMat.data;
auto *tex_buf = (uint8_t *) imageTexture->lock();
// swizzle OpenCV's 3-byte pixels into the texture's 4-byte layout
for (int j = 0; j < cameraRGBMat.rows * cameraRGBMat.cols; j++) {
    *(tex_buf) = *(frame_buf + 2);
    *(tex_buf + 1) = *(frame_buf + 1);
    *(tex_buf + 2) = *(frame_buf);
    *(tex_buf + 3) = 255; // opaque alpha
    frame_buf += 3;
    tex_buf += 4;
}
imageTexture->unlock();
device->getVideoDriver()->enableMaterial2D(true);
auto sd = device->getVideoDriver()->getCurrentRenderTargetSize();
device->getVideoDriver()->draw2DImage(imageTexture,
                                      core::rect<s32>(0, 0, sd.Width, sd.Height),
                                      core::rect<s32>(0, 0,
                                                      imageTexture->getSize().Width,
                                                      imageTexture->getSize().Height));
device->getVideoDriver()->enableMaterial2D(false);
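As an aside, the per-pixel swizzle loop above can be folded into OpenCV by converting straight to four channels and copying in one pass. A sketch, assuming the locked texture buffer is tightly packed and that RGBA is the byte order the driver actually expects (verify against your driver, since ECF_A8R8G8B8 is B,G,R,A in memory on little-endian); cameraRGBAMat is a hypothetical cv::Mat member:

cv::cvtColorTwoPlane(y_plane, uv_plane, cameraRGBAMat, cv::COLOR_YUV2RGBA_NV21);
auto *tex_buf = (uint8_t *) imageTexture->lock();
memcpy(tex_buf, cameraRGBAMat.data, cameraRGBAMat.total() * cameraRGBAMat.elemSize());
imageTexture->unlock();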
Creating the YUV texture and copying the YUV data into it
yuvTexture = driver->addTexture(core::dimension2d<u32>(cameraWidth, cameraHeight),
"yuvTexture", ECF_A8R8G8B8);
To keep things simple, the YUV420 data is first expanded to YUV444 and written straight into the texture buffer. Mind how the bytes map to the RGBA channels the shader sees: with ECF_A8R8G8B8 here, buf corresponds to b and buf+3 to a, so the shader must read each component from the matching channel.
auto buf = static_cast<uint8_t *>(yuvTexture->lock());
auto *y_buf = yData;
int width = yuvTexture->getSize().Width;
int height = yuvTexture->getSize().Height;
for (int h = 0; h < height; ++h) {
    auto *uv_buf_row = uvData + int(h / 2) * width; // every two rows share one row of UV
    for (int w = 0; w < width; ++w) {
        // Y component
        *buf = *y_buf;
        y_buf += 1;
        // UV components
        auto *uv_buf_pixel = uv_buf_row + int(w / 2) * 2; // every two pixels share one UV pair
        *(buf + 1) = *uv_buf_pixel;       // U component
        *(buf + 2) = *(uv_buf_pixel + 1); // V component
        buf += 4;
    }
}
yuvTexture->unlock();
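As the preface notes, this fill has to run on the render thread, while nativeUpdateImage writes yData/uvData from the ImageReader handler thread, so the shared buffers need guarding. A minimal sketch of one way to wire that up (the mutex, flag, and fillYuvTexture() helper are additions for illustration, not part of the demo):

#include <mutex>

static std::mutex frameMutex;
static bool frameDirty = false;

// camera thread: wrap the memcpy into yData/uvData inside nativeUpdateImage
{
    std::lock_guard<std::mutex> lock(frameMutex);
    // ... memcpy into yData / uvData ...
    frameDirty = true;
}

// render thread: refill the texture only when a fresh frame has arrived
{
    std::lock_guard<std::mutex> lock(frameMutex);
    if (frameDirty) {
        fillYuvTexture(); // the lock/expand/unlock loop shown above
        frameDirty = false;
    }
}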
Vertex and triangle setup
Define the four corners of the image quad:
Vertices[0] = S3DVertex(-1.0f, -1.0f, 0.0f,                // position, feeds inVertexPosition in the shader
                        1.0f, 1.0f, 1.0f,                  // normal, unused here
                        video::SColor(255, 255, 255, 255), // vertex color, unused here
                        0.0f, 1.0f);                       // texture uv, feeds inTexCoord0 in the shader
Vertices[1] = S3DVertex(1.0f, -1.0f, 0.0f,
                        1.0f, 1.0f, 1.0f,
                        video::SColor(255, 255, 255, 255),
                        1.0f, 1.0f);
Vertices[2] = S3DVertex(1.0f, 1.0f, 0.0f,
                        1.0f, 1.0f, 1.0f,
                        video::SColor(255, 255, 255, 255),
                        1.0f, 0.0f);
Vertices[3] = S3DVertex(-1.0f, 1.0f, 0.0f,
                        1.0f, 1.0f, 1.0f,
                        video::SColor(255, 255, 255, 255),
                        0.0f, 0.0f);
Define the triangle index order. Two triangles are enough to cover the rectangular frame; note the v texture coordinate is 1 on the bottom vertices and 0 on the top ones, which flips the image vertically so the first row of camera data lands at the top of the screen:
u16 indices[] = {2, 1, 3, 1, 0, 3};
driver->drawIndexedTriangleList(&Vertices[0], 4, &indices[0], 2);
Vertex shader: YUVVertexShader.glsl
Since the quad fills the whole screen, the vertex positions map directly to clip-space screen coordinates, so positions and texture coordinates are simply passed through.
Note that Irrlicht internally calls glBindAttribLocation with the hard-coded attribute names inVertexPosition and inTexCoord0; with plain OpenGL you must bind matching names yourself.
// set the default precision
precision mediump float;
attribute vec3 inVertexPosition; // vertex position; name must match Irrlicht's glBindAttribLocation
attribute vec2 inTexCoord0;      // texture uv; name must match Irrlicht's glBindAttribLocation
// texture coordinate handed to the fragment shader
varying vec2 tc; // name must match the fragment shader
void main()
{
    // pass the position straight through as the clip-space position
    gl_Position = vec4(inVertexPosition, 1.0);
    // pass the texture coordinate through
    tc = inTexCoord0;
}
Fragment shader: YUVFragmentShader.glsl
The YUV data arrives packed in the ARGB texture, so the shader pulls each component from its channel and converts to RGB:
precision mediump float;
varying mediump vec2 tc;
uniform sampler2D yuvTexture;
// GLSL mat3 constructors are column-major: each source line below is a column
mat3 yuvToRgb = mat3(
    1, 1, 1,
    0, -0.39465, 2.03211,
    1.13983, -0.58060, 0
);
void main()
{
    mediump float y = texture2D(yuvTexture, tc).b;        // Y component (b channel)
    mediump vec2 uv = texture2D(yuvTexture, tc).gr - 0.5; // U from g, V from r, recentered around 0
    // float r = y + 1.13983 * uv.y;
    // float g = y - 0.39465 * uv.x - 0.58060 * uv.y;
    // float b = y + 2.03211 * uv.x;
    // gl_FragColor = vec4(r,g,b, 1.0);
    vec3 rgb = yuvToRgb * vec3(y, uv);
    gl_FragColor = vec4(rgb, 1.0);
}
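Written out, the matrix product is exactly the commented-out lines: the standard full-range BT.601 conversion, with U and V recentered around zero:

R = Y + 1.13983 * (V - 0.5)
G = Y - 0.39465 * (U - 0.5) - 0.58060 * (V - 0.5)
B = Y + 2.03211 * (U - 0.5)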
Loading the shaders and rendering
IVideoDriver *driver = SceneManager->getVideoDriver();
video::IGPUProgrammingServices *gpu = driver->getGPUProgrammingServices();
s32 materialType = gpu->addHighLevelShaderMaterialFromFiles(
        mediaPath + "Shaders/YUVVertexShader.glsl",
        mediaPath + "Shaders/YUVFragmentShader.glsl", 0);
SMaterial Material;
Material.Wireframe = false;
Material.Lighting = false;
Material.ZBuffer = video::ECFN_DISABLED; // no depth test, so no occlusion against other scene elements
Material.MaterialType = (video::E_MATERIAL_TYPE) materialType;
Material.setTexture(0, yuvTexture);
driver->setMaterial(Material);
driver->setTransform(ETS_WORLD, AbsoluteTransformation);
// draw the two triangles
driver->drawIndexedTriangleList(&Vertices[0], 4, &indices[0], 2);
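One practical note: addHighLevelShaderMaterialFromFiles returns -1 when the shaders fail to compile or link, so it is worth guarding before the cast; a small defensive sketch:

if (materialType == -1) {
    // shader compile/link failed; fall back to a built-in material so the
    // quad still draws (untextured) instead of using an invalid material id
    materialType = video::EMT_SOLID;
}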
Closing notes
The UV channels could also be split out into their own texture and passed to the shader separately. But Irrlicht only supports 32-bit textures (8-bit and 16-bit textures can't be created directly), so separate Y and UV textures would waste space; expanding to YUV444 is simpler. Alternatively, Y can be downsampled to the same width and height as UV before upload.
Downsampling Y to the UV resolution
Nothing else changes; only the texture dimensions and the fill loop need updating (at the cost of luma detail, since Y is now sampled at quarter resolution):
yuvTexture = driver->addTexture(core::dimension2d<u32>(cameraWidth / 2, cameraHeight / 2),
                                "yuvTexture", ECF_A8R8G8B8);
auto buf = static_cast<uint8_t *>(yuvTexture->lock());
int width = yuvTexture->getSize().Width;
int height = yuvTexture->getSize().Height;
auto *uv_buf = uvData;
for (int h = 0; h < height; ++h) {
    auto *y_buf_row = yData + h * 2 * width * 2; // take every other Y row (width * 2 is the full-res stride)
    for (int w = 0; w < width; ++w) {
        // Y component: take every other pixel
        *buf = *(y_buf_row + w * 2);
        // UV components
        *(buf + 1) = *uv_buf;       // U component
        *(buf + 2) = *(uv_buf + 1); // V component
        uv_buf += 2;
        buf += 4;
    }
}
yuvTexture->unlock();
