YUV420P、YUV420SP、NV12、NV21和RGB互相轉換並存儲為JPEG以及PNG圖片
音視訊實踐學習
- android全平臺編譯ffmpeg以及x264與fdk-aac實踐
- ubuntu下使用nginx和nginx-rtmp-module配置直播推流伺服器
- android全平臺編譯ffmpeg合併為單個庫實踐
- android-studio使用cmake編譯ffmpeg實踐
- android全平臺編譯ffmpeg視訊解碼器實踐
- android全平臺編譯ffmpeg支援命令列實踐
- android全平臺編譯ffmpeg視訊推流實踐
- android平臺下音訊編碼之編譯LAME庫轉碼PCM為MP3
- ubuntu平臺下編譯vlc-android視訊播放器實踐
- 圖解YU12、I420、YV12、NV12、NV21、YUV420P、YUV420SP、YUV422P、YUV444P的區別
- 圖解RGB565、RGB555、RGB16、RGB24、RGB32、ARGB32等格式的區別
- YUV420P、YUV420SP、NV12、NV21和RGB互相轉換並存儲為JPEG以及PNG圖片
- android全平臺編譯libyuv庫實現YUV和RGB的轉換
RGB轉YUV420P
這裡就不新建android工程來測試了
,直接使用Clion
來執行測試輸出
首先將我們目標的JPEG
圖片轉換為RGB檔案
:
ffmpeg -i main.jpg -s 510x510 -pix_fmt rgb24 rgb24.rgb
開始執行轉換RGB24轉換為YUV
,這裡要注意的一點就是,RGB24
實際佔用的記憶體為width * height * 3
,而YUV420P
實際佔用的記憶體為width * height * 3 / 2
/* Clamp x into the inclusive range [min_val, max_val]. */
unsigned char ClipValue(unsigned char x, unsigned char min_val, unsigned char max_val) {
    if (x < min_val) {
        return min_val;
    }
    return (x > max_val) ? max_val : x;
}
/*
 * Convert packed RGB24 to planar YUV420P (BT.601, studio swing).
 *
 * rgb24   : input, width * height * 3 bytes (R,G,B per pixel)
 * yuv420p : output, width * height * 3 / 2 bytes
 *           (Y plane, then w*h/4 bytes of U, then w*h/4 bytes of V)
 *
 * Chroma is taken from one pixel per 2x2 block: U on even rows,
 * V on odd rows (both at even columns), giving w/2 * h/2 samples each.
 *
 * Note: the original code right-shifted negative intermediates
 * (implementation-defined in C) and relied on unsigned-char wraparound.
 * Here the +16 / +128 offsets are folded into the rounding constant
 * before the shift so every shifted value is non-negative and the
 * arithmetic is fully defined; results are identical.
 */
void RGB2YUV420P(unsigned char *rgb24, int width, int height, unsigned char *yuv420p) {
    unsigned char *ptrY, *ptrU, *ptrV;
    memset(yuv420p, 0, (size_t)width * height * 3 / 2);
    ptrY = yuv420p;
    ptrU = yuv420p + width * height;
    ptrV = ptrU + (width * height / 4);
    for (int j = 0; j < height; j++) {
        for (int i = 0; i < width; i++) {
            int index = (j * width + i) * 3;
            int r = rgb24[index];
            int g = rgb24[index + 1];
            int b = rgb24[index + 2];
            /* 4224 = 128 (rounding) + 16*256; 32896 = 128 + 128*256 */
            int y = ( 66 * r + 129 * g +  25 * b +  4224) >> 8;
            int u = (-38 * r -  74 * g + 112 * b + 32896) >> 8;
            int v = (112 * r -  94 * g -  18 * b + 32896) >> 8;
            /* BT.601 output stays within [16,240] mathematically, but
               clamp defensively before narrowing to a byte. */
            if (y < 0) y = 0; else if (y > 255) y = 255;
            if (u < 0) u = 0; else if (u > 255) u = 255;
            if (v < 0) v = 0; else if (v > 255) v = 255;
            *(ptrY++) = (unsigned char) y;
            if (j % 2 == 0 && i % 2 == 0) {
                *(ptrU++) = (unsigned char) u;
            } else if (i % 2 == 0) {
                *(ptrV++) = (unsigned char) v;
            }
        }
    }
}
/*
 * Read a packed RGB24 file from rgbPath, convert it to planar YUV420P
 * and write the result to yuvPath.
 *
 * The original version never checked fopen/malloc/fread, so a missing
 * input file dereferenced NULL; it also opened files in "+" update mode
 * for no reason. goto-based cleanup releases everything on all paths.
 */
void rgb24_to_yuv420(char *rgbPath, char *yuvPath, int width, int height) {
    size_t rgb_size = (size_t)width * height * 3;
    size_t yuv_size = (size_t)width * height * 3 / 2;
    unsigned char *rgb24_data = NULL;
    unsigned char *yuv420_data = NULL;
    FILE *fp_yuv = NULL;
    FILE *fp_rgb = fopen(rgbPath, "rb");   /* read-only is sufficient */
    if (fp_rgb == NULL) goto cleanup;
    fp_yuv = fopen(yuvPath, "wb");
    if (fp_yuv == NULL) goto cleanup;
    rgb24_data = malloc(rgb_size);
    yuv420_data = malloc(yuv_size);
    if (rgb24_data == NULL || yuv420_data == NULL) goto cleanup;
    /* A short read means the file does not hold a full frame. */
    if (fread(rgb24_data, 1, rgb_size, fp_rgb) != rgb_size) goto cleanup;
    RGB2YUV420P(rgb24_data, width, height, yuv420_data);
    fwrite(yuv420_data, 1, yuv_size, fp_yuv);
cleanup:
    free(rgb24_data);          /* free(NULL) is a no-op */
    free(yuv420_data);
    if (fp_rgb) fclose(fp_rgb);
    if (fp_yuv) fclose(fp_yuv);
}
我們可以使用ffplay來播放我們最終生成的rgb24.yuv檔案(原始YUV資料沒有檔案頭,需要顯式指定尺寸和畫素格式):
ffplay -f rawvideo -video_size 510x510 -pixel_format yuv420p rgb24.yuv
YUV420P轉RGB
你也可以根據上面的例子一樣,直接使用Clion
轉換完之後,使用雷神的yuvplayer
來檢視,這裡筆者直接新建了一個native-yuv2rgb
,將最終的yuv
檔案轉換為RGB資料
之後基於ANativeWindow
渲染出來。
定義好java層
的介面
// Java binding for the native renderer library (libnative-yuv2rgb.so).
package com.onzhou.graphic.yuv2rgb;
import android.view.Surface;
public class NativeYUV2RGB {
static {
System.loadLibrary("native-yuv2rgb");
}
// Reads the YUV file at imagePath (width x height) and renders it into
// the given Surface via the native layer.
// NOTE(review): the Activity sample later calls this method with an extra
// NativeYUV2RGB.Type argument; that 5-argument call does not match this
// 4-parameter declaration -- confirm which signature the project uses.
public native void yuv2rgb(String imagePath, int width, int height, Surface surface);
}
接下來就是核心的native層
實現
/**
 * Convert planar YUV420P to packed RGB24.
 *
 * @param data   input, width * height * 3 / 2 bytes:
 *               Y plane (w*h), then U plane (w*h/4), then V plane (w*h/4)
 * @param rgb    output, width * height * 3 bytes (R,G,B per pixel)
 * @param width  frame width in pixels
 * @param height frame height in pixels
 */
void YUV420P_TO_RGB24(unsigned char *data, unsigned char *rgb, int width, int height) {
    int index = 0;
    unsigned char *ybase = data;
    unsigned char *ubase = &data[width * height];
    unsigned char *vbase = &data[width * height * 5 / 4];
    for (int y = 0; y < height; y++) {
        for (int x = 0; x < width; x++) {
            /* one chroma sample is shared by each 2x2 block of pixels */
            int Y = ybase[x + y * width];
            int U = ubase[y / 2 * width / 2 + (x / 2)];
            int V = vbase[y / 2 * width / 2 + (x / 2)];
            int r = (int) (Y + 1.402 * (V - 128));
            int g = (int) (Y - 0.34413 * (U - 128) - 0.71414 * (V - 128));
            int b = (int) (Y + 1.772 * (U - 128));
            /* Clamp to [0,255]: the raw conversion can produce values
               outside byte range, and converting an out-of-range double
               to unsigned char is undefined behavior in C. */
            rgb[index++] = (unsigned char) (r < 0 ? 0 : (r > 255 ? 255 : r)); /* R */
            rgb[index++] = (unsigned char) (g < 0 ? 0 : (g > 255 ? 255 : g)); /* G */
            rgb[index++] = (unsigned char) (b < 0 ? 0 : (b > 255 ? 255 : b)); /* B */
        }
    }
}
上面這個方法的主要作用就是將輸入的YUV資料轉換為RGB
,轉換公式可以參考之前的部落格,或者百度一下。
接下來我們需要通過ANativeWindow_fromSurface
獲取應用的視窗,然後讀取YUV檔案
轉換為最終的RGB資料
,接著將最終的RGB資料
寫入到視窗的buffer
中去,完成渲染。
/**
 * Read one YUV420P frame from |path|, convert it to RGB24 and blit it
 * into an ANativeWindow buffer configured as WINDOW_FORMAT_RGBA_8888.
 *
 * Fixes over the original:
 *  - fopen/fread results are checked (missing or short file no longer
 *    dereferences NULL / converts garbage);
 *  - the alpha byte is set to 0xFF -- the original wrote only R,G,B into
 *    an RGBA_8888 pixel, leaving alpha 0 (a fully transparent image on
 *    surfaces that honor alpha).
 */
void drawYUV(const char *path, int width, int height, ANativeWindow_Buffer buffer) {
    FILE *file = fopen(path, "rb");
    if (file == NULL) {
        return; // nothing to draw
    }
    const size_t yuvSize = (size_t) width * height * 3 / 2;
    unsigned char *yuvData = new unsigned char[yuvSize];
    unsigned char *rgb24 = new unsigned char[(size_t) width * height * 3];
    if (fread(yuvData, 1, yuvSize, file) == yuvSize) {
        // YUV420P -> RGB24
        YUV420P_TO_RGB24(yuvData, rgb24, width, height);
        uint32_t *line = (uint32_t *) buffer.bits;
        for (int y = 0; y < height; y++) {
            for (int x = 0; x < width; x++) {
                int index = y * width + x;
                // RGBA_8888 in a little-endian uint32 is 0xAABBGGRR.
                line[x] = (0xFFu << 24)                     // A
                          | rgb24[index * 3 + 2] << 16      // B
                          | rgb24[index * 3 + 1] << 8       // G
                          | rgb24[index * 3];               // R
            }
            line = line + buffer.stride; // stride is in pixels and may exceed width
        }
    }
    // release buffers and close the file handle on every path
    delete[] yuvData;
    delete[] rgb24;
    fclose(file);
}
/**
 * JNI entry point: load the YUV420P file at |jpegPath| and render it into
 * |surface| as RGBA_8888.
 *
 * Fixes over the original:
 *  - the UTF chars obtained via GetStringUTFChars were leaked on every
 *    error path; they are now released on all paths;
 *  - the extra ANativeWindow_acquire() was removed: fromSurface already
 *    returns a window holding a reference, and the single release below
 *    left that extra reference leaked.
 */
void yuv2rgb(JNIEnv *env, jobject obj, jstring jpegPath, jint width, jint height, jobject surface) {
    const char *path = env->GetStringUTFChars(jpegPath, 0);
    // acquire the target surface's native window (holds one reference)
    ANativeWindow *window = ANativeWindow_fromSurface(env, surface);
    if (NULL == window) {
        env->ReleaseStringUTFChars(jpegPath, path);
        ThrowException(env, "java/lang/RuntimeException", "unable to get native window");
        return;
    }
    // request RGBA_8888 buffers (the default would be RGB_565)
    int32_t result = ANativeWindow_setBuffersGeometry(window, 0, 0, WINDOW_FORMAT_RGBA_8888);
    if (result < 0) {
        env->ReleaseStringUTFChars(jpegPath, path);
        ThrowException(env, "java/lang/RuntimeException", "unable to set buffers geometry");
        ANativeWindow_release(window);
        return;
    }
    ANativeWindow_Buffer buffer;
    // lock the window's drawing surface
    if (ANativeWindow_lock(window, &buffer, NULL) < 0) {
        env->ReleaseStringUTFChars(jpegPath, path);
        ThrowException(env, "java/lang/RuntimeException", "unable to lock native window");
        ANativeWindow_release(window);
        return;
    }
    // convert and draw the YUV frame
    drawYUV(path, width, height, buffer);
    // unlock and post the drawing surface
    if (ANativeWindow_unlockAndPost(window) < 0) {
        ThrowException(env, "java/lang/RuntimeException",
                       "unable to unlock and post to native window");
    }
    env->ReleaseStringUTFChars(jpegPath, path);
    ANativeWindow_release(window);
}
在啟動的Activity
中,讀取目標的YUV檔案
,將路徑
以及surface
傳遞給native層
完成轉換以及渲染。
// Button handler: converts the stored yuv420p.yuv (510x510) file to RGB24
// and renders it into the SurfaceView via the native layer.
public void onYUV420PToRGB24(View view) {
File file = new File(getExternalFilesDir(null), "yuv420p.yuv");
// NOTE(review): this call passes 5 arguments (including a
// NativeYUV2RGB.Type constant), but the NativeYUV2RGB.yuv2rgb declaration
// shown earlier in the article takes only 4 parameters -- one of the two
// snippets is out of date; verify against the project source.
mNativeYUV2RGB.yuv2rgb(file.getAbsolutePath(), NativeYUV2RGB.Type.YUV420P_TO_RGB24, 510, 510, mSurfaceView.getHolder().getSurface());
}
最終顯示的效果圖:
NV12轉RGB24
之前的部落格也說了,NV12(NV12: YYYYYYYYUVUV =>YUV420SP)本質上也屬於YUV420SP
,在進行這個例子之前,我們先拿到一張最原始的NV12的檔案
。
ffmpeg -i main.jpg -s 510x510 -pix_fmt nv12 nv12.yuv
轉換過程跟上述類似,下面給出具體的轉換公式:
/**
 * Convert NV12 (a YUV420SP layout: full Y plane followed by interleaved
 * U,V pairs -- YYYYYYYY UVUV) to packed RGB24.
 *
 * @param data   input, width * height * 3 / 2 bytes
 * @param rgb    output, width * height * 3 bytes
 * @param width  frame width in pixels
 * @param height frame height in pixels
 */
void NV12_TO_RGB24(unsigned char *data, unsigned char *rgb, int width, int height) {
    int index = 0;
    unsigned char *ybase = data;
    unsigned char *ubase = &data[width * height];
    for (int y = 0; y < height; y++) {
        for (int x = 0; x < width; x++) {
            /* interleaved chroma: U at even offsets, V at odd offsets */
            int Y = ybase[x + y * width];
            int U = ubase[y / 2 * width + (x / 2) * 2];
            int V = ubase[y / 2 * width + (x / 2) * 2 + 1];
            int r = (int) (Y + 1.402 * (V - 128));
            int g = (int) (Y - 0.34413 * (U - 128) - 0.71414 * (V - 128));
            int b = (int) (Y + 1.772 * (U - 128));
            /* Clamp to [0,255]: converting an out-of-range double to
               unsigned char is undefined behavior in C. */
            rgb[index++] = (unsigned char) (r < 0 ? 0 : (r > 255 ? 255 : r)); /* R */
            rgb[index++] = (unsigned char) (g < 0 ? 0 : (g > 255 ? 255 : g)); /* G */
            rgb[index++] = (unsigned char) (b < 0 ? 0 : (b > 255 ? 255 : b)); /* B */
        }
    }
}
NV21轉RGB24
同上面所述,NV21(NV21: YYYYYYYYVUVU =>YUV420SP)同樣也屬於YUV420SP
,這個格式通常也預設是android手機相機的格式
,在進行這個例子之前,我們先拿到一張最原始的NV21的檔案
。
ffmpeg -i main.jpg -s 510x510 -pix_fmt nv21 nv21.yuv
轉換公式:
/**
 * Convert NV21 (a YUV420SP layout: full Y plane followed by interleaved
 * V,U pairs -- YYYYYYYY VUVU; the historical default Android camera
 * preview format) to packed RGB24.
 * (Original comment said "NV12" here; this function handles NV21.)
 *
 * @param data   input, width * height * 3 / 2 bytes
 * @param rgb    output, width * height * 3 bytes
 * @param width  frame width in pixels
 * @param height frame height in pixels
 */
void NV21_TO_RGB24(unsigned char *data, unsigned char *rgb, int width, int height) {
    int index = 0;
    unsigned char *ybase = data;
    unsigned char *ubase = &data[width * height];
    for (int y = 0; y < height; y++) {
        for (int x = 0; x < width; x++) {
            /* interleaved chroma: V at even offsets, U at odd offsets */
            int Y = ybase[x + y * width];
            int U = ubase[y / 2 * width + (x / 2) * 2 + 1];
            int V = ubase[y / 2 * width + (x / 2) * 2];
            int r = (int) (Y + 1.402 * (V - 128));
            int g = (int) (Y - 0.34413 * (U - 128) - 0.71414 * (V - 128));
            int b = (int) (Y + 1.772 * (U - 128));
            /* Clamp to [0,255]: converting an out-of-range double to
               unsigned char is undefined behavior in C. */
            rgb[index++] = (unsigned char) (r < 0 ? 0 : (r > 255 ? 255 : r)); /* R */
            rgb[index++] = (unsigned char) (g < 0 ? 0 : (g > 255 ? 255 : g)); /* G */
            rgb[index++] = (unsigned char) (b < 0 ? 0 : (b > 255 ? 255 : b)); /* B */
        }
    }
}
專案地址:native-yuv2rgb
https://github.com/byhook/graphic4android
儲存為JPEG圖片
YUV資料
是無法直接儲存為JPEG圖片的
,這裡需要先轉換為RGB資料
,然後利用libjpeg-turbo庫
,完成JPEG轉換
。
YUV420P轉RGB24
:
/*
 * Convert planar YUV420P to packed RGB24 (index-based variant used before
 * handing the buffer to libjpeg-turbo).
 *
 * yuv420p : input, width * height * 3 / 2 bytes
 *           (Y plane, then U plane w*h/4, then V plane w*h/4)
 * rgb24   : output, width * height * 3 bytes
 */
void YUV420P_TO_RGB24(unsigned char *yuv420p, unsigned char *rgb24, int width, int height) {
    int index = 0;
    for (int y = 0; y < height; y++) {
        for (int x = 0; x < width; x++) {
            int indexY = y * width + x;
            /* one chroma sample per 2x2 pixel block */
            int indexU = width * height + y / 2 * width / 2 + x / 2;
            int indexV = width * height + width * height / 4 + y / 2 * width / 2 + x / 2;
            int Y = yuv420p[indexY];
            int U = yuv420p[indexU];
            int V = yuv420p[indexV];
            int r = (int) (Y + 1.402 * (V - 128));
            int g = (int) (Y - 0.34413 * (U - 128) - 0.71414 * (V - 128));
            int b = (int) (Y + 1.772 * (U - 128));
            /* Clamp to [0,255]: converting an out-of-range double to
               unsigned char is undefined behavior in C. */
            rgb24[index++] = (unsigned char) (r < 0 ? 0 : (r > 255 ? 255 : r)); /* R */
            rgb24[index++] = (unsigned char) (g < 0 ? 0 : (g > 255 ? 255 : g)); /* G */
            rgb24[index++] = (unsigned char) (b < 0 ? 0 : (b > 255 ? 255 : b)); /* B */
        }
    }
}
然後將轉換後的RGB資料
通過libjpeg-turbo
寫入檔案:
int YUV2JPEG::RGB24_TO_JPEG(const char *filename, u_char *rgbData, int image_width,
int image_height,
int quality) {
jpeg_compress_struct jpegCompress;
jpeg_error_mgr jpegError;
jpegCompress.err = jpeg_std_error(&jpegError);
//