前往小程序,Get更优阅读体验!
立即前往
首页
学习
活动
专区
工具
TVP
发布
社区首页 >专栏 >小型电裁剪刀_手动裁剪

小型电裁剪刀_手动裁剪

作者头像
全栈程序员站长
发布2022-11-08 16:50:09
1.5K0
发布2022-11-08 16:50:09
举报
文章被收录于专栏:全栈程序员必看

大家好,又见面了,我是你们的朋友全栈君。

由于简书经常打不开,或者打开慢,不靠谱,还是把文章迁移到CSDN吧。 简书链接:https://www.jianshu.com/p/8c6508cab763 有时候想对摄像头采集的视频流进行区域裁剪,可以使用libyuv这个库,原理就是先把NV12转换为i420,对i420做裁剪,然后再把i420转换为NV12,NV12再转换为CVPixelBufferRef,CVPixelBufferRef再转换为CMSampleBufferRef。

这里有几个注意点:
1.iOS13使用了64字节对齐,也就是步长(每行字节数)是64的倍数。而之前的版本使用的是16字节对齐。
2.使用 libyuv::ConvertToI420 方法时,src_width需要填入步长而不是宽度,因为yuv内部要根据步长来取U、V数据,如果是填入的宽度,那么就会取值位移错误,导致转换失真。
3.因为我是直接NV12数据转换的,所以填写的类型是:libyuv::FOURCC_NV12。应该根据当前数据的类型选择对应的格式。
4.NV12转换为CVPixelBufferRef时,填入对应的步长:nv12_plane1_stride。

关于步长解释:https://www.jianshu.com/p/eace8c08b169

一:对NV12裁剪代码如下:
代码语言:javascript
复制
/// Crops the NV12 frame carried by `sampleBufRef` to `screenshotsFrame` and
/// returns a newly created NV12 `CVPixelBufferRef`.
///
/// Pipeline: NV12 --(libyuv crop+convert)--> I420 --> NV12 --> CVPixelBufferRef.
///
/// @param sampleBufRef     Sample buffer whose image buffer is NV12
///                         (kCVPixelFormatType_420YpCbCr8BiPlanar*).
/// @param screenshotsFrame Crop rectangle in pixels; odd sizes are rounded up
///                         to even (4:2:0 chroma subsampling requires it).
/// @return A +1-retained pixel buffer the caller must release, or NULL when the
///         input has no image buffer, the crop rect exceeds the frame, or
///         allocation/creation fails.
+ (CVPixelBufferRef)convertNV12ToI420Screenshots:(CMSampleBufferRef)sampleBufRef screenshotsFrame:(CGRect)screenshotsFrame {
    int screenshots_x = screenshotsFrame.origin.x;
    int screenshots_y = screenshotsFrame.origin.y;
    int screenshots_width = screenshotsFrame.size.width;
    int screenshots_hight = screenshotsFrame.size.height;
    // 4:2:0 subsampling: width/height must be even.
    if (screenshots_width % 2 != 0) {
        screenshots_width++;
    }
    if (screenshots_hight % 2 != 0) {
        screenshots_hight++;
    }
    // CVPixelBufferRef is a typedef of CVImageBufferRef; the APIs interoperate.
    CVImageBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBufRef);
    if (!pixelBuffer) {
        return NULL;
    }
    CVPixelBufferLockBaseAddress(pixelBuffer, 0);
    size_t buffer_width = CVPixelBufferGetWidth(pixelBuffer);
    size_t buffer_height = CVPixelBufferGetHeight(pixelBuffer);
    // Base address of the Y plane (plane 0 of NV12).
    uint8_t *src_y_frame = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0);
    size_t plane1_stride = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 0);  // Y stride
    size_t plane2_stride = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 1);  // UV stride
    size_t plane1_height = CVPixelBufferGetHeightOfPlane(pixelBuffer, 0);       // Y rows
    size_t plane2_height = CVPixelBufferGetHeightOfPlane(pixelBuffer, 1);       // UV rows
    size_t plane1_size = plane1_stride * plane1_height;
    size_t plane2_size = plane2_stride * plane2_height;
    size_t frame_size = plane1_size + plane2_size;  // total NV12 byte count
    // The crop rect must lie fully inside the source frame.
    if (screenshots_x < 0 || screenshots_y < 0 ||
        screenshots_x + screenshots_width > (int)buffer_width ||
        screenshots_y + screenshots_hight > (int)buffer_height) {
        // BUG FIX: the original returned here without unlocking the base address.
        CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
        return NULL;
    }
    // Row alignment: iOS 13 pads rows to 64 bytes, earlier systems to 16.
    int stride_length = ([UIDevice currentDevice].systemVersion.floatValue >= 13.0) ? 64 : 16;
    int scale_plane1_stride = screenshots_width;
    if ((screenshots_width % stride_length) != 0) {
        scale_plane1_stride = (screenshots_width / stride_length + 1) * stride_length;
    }
    int scale_plane2_stride = scale_plane1_stride;
    int scale_plane1_height = screenshots_hight;
    int scale_plane2_height = screenshots_hight / 2;
    int scale_plane1_size = scale_plane1_stride * scale_plane1_height;
    int scale_plane2_size = scale_plane2_stride * scale_plane2_height;
    int scale_frame_size = scale_plane1_size + scale_plane2_size;
    uint8_t *scale_buffer = (uint8_t *)malloc(scale_frame_size);
    if (!scale_buffer) {
        CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
        return NULL;
    }
    uint8_t *scale_buffer_u = scale_buffer + scale_plane1_size;
    uint8_t *scale_buffer_v = scale_buffer_u + scale_plane1_size / 4;
    // NOTE: src_width is the Y-plane *stride*, not the pixel width — libyuv
    // derives the chroma offsets from it, so passing the width would shear the
    // image whenever the buffer has row padding. This also assumes the UV
    // plane immediately follows the Y plane in memory (true for contiguous
    // camera buffers — verify for other producers).
    libyuv::ConvertToI420(/*src_frame*/ src_y_frame,
                          /*src_size*/ frame_size,
                          /*dst_y*/ scale_buffer,
                          /*dst_stride_y*/ scale_plane1_stride,
                          /*dst_u*/ scale_buffer_u,
                          /*dst_stride_u*/ scale_plane1_stride >> 1,
                          /*dst_v*/ scale_buffer_v,
                          /*dst_stride_v*/ scale_plane1_stride >> 1,
                          /*crop_x*/ screenshots_x,
                          /*crop_y*/ screenshots_y,
                          /*src_width*/ (int)plane1_stride,
                          /*src_height*/ (int)buffer_height,
                          /*crop_width*/ screenshots_width,
                          /*crop_height*/ screenshots_hight,
                          /*rotation*/ libyuv::kRotate0,
                          /*format*/ libyuv::FOURCC_NV12);
    // The source is no longer needed; release the lock before the repack stage.
    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
    // Repack the cropped I420 planes into NV12 (interleaved UV).
    int nv12_plane1_stride = scale_plane1_stride;
    int nv12_width = screenshots_width;
    int nv12_hight = screenshots_hight;
    uint8_t *nv12_dst_y = (uint8_t *)malloc(scale_frame_size);
    if (!nv12_dst_y) {
        free(scale_buffer);
        return NULL;
    }
    uint8_t *nv12_dst_uv = nv12_dst_y + nv12_plane1_stride * nv12_hight;
    libyuv::I420ToNV12(/*src_y*/ scale_buffer,
                       /*src_stride_y*/ scale_plane1_stride,
                       /*src_u*/ scale_buffer_u,
                       /*src_stride_u*/ scale_plane1_stride >> 1,
                       /*src_v*/ scale_buffer_v,
                       /*src_stride_v*/ scale_plane1_stride >> 1,
                       /*dst_y*/ nv12_dst_y,
                       /*dst_stride_y*/ nv12_plane1_stride,
                       /*dst_uv*/ nv12_dst_uv,
                       /*dst_stride_uv*/ nv12_plane1_stride,
                       /*width*/ nv12_width,
                       /*height*/ nv12_hight);
    free(scale_buffer);
    // Wrap the NV12 bytes in a fresh CVPixelBufferRef.
    NSDictionary *pixelAttributes = @{(id)kCVPixelBufferIOSurfacePropertiesKey : @{}};
    CVPixelBufferRef dstPixelBuffer = NULL;
    CVReturn result = CVPixelBufferCreate(kCFAllocatorDefault,
                                          nv12_width, nv12_hight,
                                          kCVPixelFormatType_420YpCbCr8BiPlanarFullRange,
                                          (__bridge CFDictionaryRef)pixelAttributes,
                                          &dstPixelBuffer);
    // BUG FIX: the original locked and memcpy'd into dstPixelBuffer *before*
    // checking `result`, dereferencing NULL when creation failed, and leaked
    // nv12_dst_y on that path.
    if (result != kCVReturnSuccess || dstPixelBuffer == NULL) {
        NSLog(@"Unable to create cvpixelbuffer %d", result);
        free(nv12_dst_y);
        return NULL;
    }
    CVPixelBufferLockBaseAddress(dstPixelBuffer, 0);
    // BUG FIX: copy row by row honouring the destination's own bytes-per-row —
    // CoreVideo does not guarantee it equals nv12_plane1_stride.
    uint8_t *yDstPlane = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(dstPixelBuffer, 0);
    size_t yDstStride = CVPixelBufferGetBytesPerRowOfPlane(dstPixelBuffer, 0);
    for (int row = 0; row < nv12_hight; row++) {
        memcpy(yDstPlane + row * yDstStride,
               nv12_dst_y + row * nv12_plane1_stride,
               nv12_width);
    }
    uint8_t *uvDstPlane = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(dstPixelBuffer, 1);
    size_t uvDstStride = CVPixelBufferGetBytesPerRowOfPlane(dstPixelBuffer, 1);
    for (int row = 0; row < nv12_hight / 2; row++) {
        memcpy(uvDstPlane + row * uvDstStride,
               nv12_dst_uv + row * nv12_plane1_stride,
               nv12_width);
    }
    CVPixelBufferUnlockBaseAddress(dstPixelBuffer, 0);
    free(nv12_dst_y);
    return dstPixelBuffer;  // +1 reference; caller releases.
}
二:CVPixelBufferRef转换为CMSampleBufferRef:
代码语言:javascript
复制
/// Wraps an NV12 pixel buffer in a CMSampleBufferRef stamped with the current
/// wall-clock time (nanosecond timescale).
///
/// NOTE: this method *consumes* `pixelBuffer` — it calls CVPixelBufferRelease
/// to balance the +1 reference returned by the crop methods above. Do not pass
/// a buffer you still need.
///
/// @param pixelBuffer The +1-retained pixel buffer to wrap (consumed).
/// @return A +1-retained sample buffer, or NULL on failure.
+ (CMSampleBufferRef)pixelBufferToSampleBuffer:(CVPixelBufferRef)pixelBuffer {
    // BUG FIX: `sampleBuffer` was uninitialized in the original; when creation
    // failed, a garbage pointer was returned to the caller.
    CMSampleBufferRef sampleBuffer = NULL;
    CMTime frameTime = CMTimeMakeWithSeconds([[NSDate date] timeIntervalSince1970], 1000000000);
    CMSampleTimingInfo timing = {frameTime, frameTime, kCMTimeInvalid};
    CMVideoFormatDescriptionRef videoInfo = NULL;
    // BUG FIX: the original ignored this status and passed a possibly-NULL
    // format description straight into the sample-buffer creation call.
    OSStatus status = CMVideoFormatDescriptionCreateForImageBuffer(NULL, pixelBuffer, &videoInfo);
    if (status == noErr && videoInfo != NULL) {
        status = CMSampleBufferCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, true,
                                                    NULL, NULL, videoInfo, &timing, &sampleBuffer);
    }
    if (status != noErr) {
        NSLog(@"Failed to create sample buffer with error %d.", (int)status);
    }
    // Balance the +1 reference handed in by the caller (see header comment).
    CVPixelBufferRelease(pixelBuffer);
    if (videoInfo) {
        CFRelease(videoInfo);
    }
    return sampleBuffer;
}
三:对NV12裁剪代码2:

其实这个方法更多的是介绍怎么把i420进行裁剪。 我没有单独弄i420文件,这里直接先把NV12转换为i420,再进行裁剪

代码语言:javascript
复制
/// Variant of the crop that demonstrates cropping an I420 buffer: the NV12
/// source is first expanded to planar I420 with NV12ToI420, the crop is then
/// performed on that I420 buffer, and the result is repacked to NV12.
///
/// @param sampleBufRef     Sample buffer whose image buffer is NV12.
/// @param screenshotsFrame Crop rectangle in pixels; odd sizes rounded up to even.
/// @return A +1-retained NV12 CVPixelBufferRef the caller must release, or NULL
///         on any failure (no image buffer, crop out of range, allocation or
///         pixel-buffer creation failure).
+ (CVPixelBufferRef)convertNV12ToI420ScreenshotsType1:(CMSampleBufferRef)sampleBufRef screenshotsFrame:(CGRect)screenshotsFrame {
    int screenshots_x = screenshotsFrame.origin.x;
    int screenshots_y = screenshotsFrame.origin.y;
    int screenshots_width = screenshotsFrame.size.width;
    int screenshots_hight = screenshotsFrame.size.height;
    // 4:2:0 subsampling: width/height must be even.
    if (screenshots_width % 2 != 0) {
        screenshots_width++;
    }
    if (screenshots_hight % 2 != 0) {
        screenshots_hight++;
    }
    CVImageBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBufRef);
    if (!pixelBuffer) {
        return NULL;
    }
    CVPixelBufferLockBaseAddress(pixelBuffer, 0);
    size_t buffer_width = CVPixelBufferGetWidth(pixelBuffer);
    size_t buffer_height = CVPixelBufferGetHeight(pixelBuffer);
    uint8_t *src_y_frame = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0);   // Y plane
    uint8_t *src_uv_frame = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 1);  // interleaved UV plane
    size_t plane1_stride = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 0);  // Y stride
    size_t plane2_stride = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 1);  // UV stride
    size_t plane1_height = CVPixelBufferGetHeightOfPlane(pixelBuffer, 0);
    size_t plane2_height = CVPixelBufferGetHeightOfPlane(pixelBuffer, 1);
    size_t plane1_size = plane1_stride * plane1_height;
    size_t plane2_size = plane2_stride * plane2_height;
    size_t frame_size = plane1_size + plane2_size;
    // I420 chroma strides: half the interleaved UV stride each.
    size_t buffer_u_strate = plane2_stride / 2;
    size_t buffer_v_strate = plane2_stride / 2;
    // The crop rect must lie fully inside the source frame.
    if (screenshots_x < 0 || screenshots_y < 0 ||
        screenshots_x + screenshots_width > (int)buffer_width ||
        screenshots_y + screenshots_hight > (int)buffer_height) {
        // BUG FIX: the original returned here without unlocking the base address.
        CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
        return NULL;
    }
    // 1. Expand NV12 to planar I420.
    uint8_t *buffer_frame = (uint8_t *)malloc(frame_size);
    if (!buffer_frame) {
        CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
        return NULL;
    }
    // Plane layout inside buffer_frame: [Y | U | V]. U occupies plane1_size/4
    // bytes, which equals buffer_u_strate * plane2_height when the UV stride
    // matches the Y stride — the usual case for camera NV12; verify otherwise.
    uint8_t *buffer_u = buffer_frame + plane1_size;
    uint8_t *buffer_v = buffer_u + plane1_size / 4;
    libyuv::NV12ToI420(/*src_y*/ src_y_frame,
                       /*src_stride_y*/ (int)plane1_stride,
                       /*src_uv*/ src_uv_frame,
                       /*src_stride_uv*/ (int)plane2_stride,
                       /*dst_y*/ buffer_frame,
                       /*dst_stride_y*/ (int)plane1_stride,
                       /*dst_u*/ buffer_u,
                       /*dst_stride_u*/ (int)buffer_u_strate,
                       /*dst_v*/ buffer_v,
                       /*dst_stride_v*/ (int)buffer_v_strate,
                       /*width*/ (int)buffer_width,
                       /*height*/ (int)buffer_height);
    // The source buffer is no longer needed.
    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
    // 2. Crop the I420 buffer.
    // Row alignment: iOS 13 pads rows to 64 bytes, earlier systems to 16.
    int stride_length = ([UIDevice currentDevice].systemVersion.floatValue >= 13.0) ? 64 : 16;
    int scale_plane1_stride = screenshots_width;
    if ((screenshots_width % stride_length) != 0) {
        scale_plane1_stride = (screenshots_width / stride_length + 1) * stride_length;
    }
    int scale_plane2_stride = scale_plane1_stride;
    int scale_plane1_height = screenshots_hight;
    int scale_plane2_height = screenshots_hight / 2;
    int scale_plane1_size = scale_plane1_stride * scale_plane1_height;
    int scale_plane2_size = scale_plane2_stride * scale_plane2_height;
    int scale_frame_size = scale_plane1_size + scale_plane2_size;
    uint8_t *scale_buffer = (uint8_t *)malloc(scale_frame_size);
    if (!scale_buffer) {
        free(buffer_frame);
        return NULL;
    }
    uint8_t *scale_buffer_u = scale_buffer + scale_plane1_size;
    uint8_t *scale_buffer_v = scale_buffer_u + scale_plane1_size / 4;
    // NOTE: src_width is the Y *stride*, not the pixel width — libyuv derives
    // the U/V plane offsets from it for FOURCC_I420.
    libyuv::ConvertToI420(/*src_frame*/ buffer_frame,
                          /*src_size*/ frame_size,
                          /*dst_y*/ scale_buffer,
                          /*dst_stride_y*/ scale_plane1_stride,
                          /*dst_u*/ scale_buffer_u,
                          /*dst_stride_u*/ scale_plane1_stride >> 1,
                          /*dst_v*/ scale_buffer_v,
                          /*dst_stride_v*/ scale_plane1_stride >> 1,
                          /*crop_x*/ screenshots_x,
                          /*crop_y*/ screenshots_y,
                          /*src_width*/ (int)plane1_stride,
                          /*src_height*/ (int)buffer_height,
                          /*crop_width*/ screenshots_width,
                          /*crop_height*/ screenshots_hight,
                          /*rotation*/ libyuv::kRotate0,
                          /*format*/ libyuv::FOURCC_I420);
    free(buffer_frame);
    // 3. Repack the cropped I420 planes into NV12.
    int nv12_plane1_stride = scale_plane1_stride;
    int nv12_width = screenshots_width;
    int nv12_hight = screenshots_hight;
    uint8_t *nv12_dst_y = (uint8_t *)malloc(scale_frame_size);
    if (!nv12_dst_y) {
        free(scale_buffer);
        return NULL;
    }
    uint8_t *nv12_dst_uv = nv12_dst_y + nv12_plane1_stride * nv12_hight;
    libyuv::I420ToNV12(/*src_y*/ scale_buffer,
                       /*src_stride_y*/ scale_plane1_stride,
                       /*src_u*/ scale_buffer_u,
                       /*src_stride_u*/ scale_plane1_stride >> 1,
                       /*src_v*/ scale_buffer_v,
                       /*src_stride_v*/ scale_plane1_stride >> 1,
                       /*dst_y*/ nv12_dst_y,
                       /*dst_stride_y*/ nv12_plane1_stride,
                       /*dst_uv*/ nv12_dst_uv,
                       /*dst_stride_uv*/ nv12_plane1_stride,
                       /*width*/ nv12_width,
                       /*height*/ nv12_hight);
    free(scale_buffer);
    // 4. Wrap the NV12 bytes in a new CVPixelBufferRef.
    NSDictionary *pixelAttributes = @{(id)kCVPixelBufferIOSurfacePropertiesKey : @{}};
    CVPixelBufferRef dstPixelBuffer = NULL;
    CVReturn result = CVPixelBufferCreate(kCFAllocatorDefault,
                                          nv12_width, nv12_hight,
                                          kCVPixelFormatType_420YpCbCr8BiPlanarFullRange,
                                          (__bridge CFDictionaryRef)pixelAttributes,
                                          &dstPixelBuffer);
    // BUG FIX: the original locked/memcpy'd into dstPixelBuffer *before*
    // checking `result`, crashing on NULL when creation failed, and leaked
    // nv12_dst_y on that path.
    if (result != kCVReturnSuccess || dstPixelBuffer == NULL) {
        NSLog(@"Unable to create cvpixelbuffer %d", result);
        free(nv12_dst_y);
        return NULL;
    }
    CVPixelBufferLockBaseAddress(dstPixelBuffer, 0);
    // BUG FIX: copy row by row honouring the destination's own bytes-per-row —
    // CoreVideo does not guarantee it equals nv12_plane1_stride.
    uint8_t *yDstPlane = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(dstPixelBuffer, 0);
    size_t yDstStride = CVPixelBufferGetBytesPerRowOfPlane(dstPixelBuffer, 0);
    for (int row = 0; row < nv12_hight; row++) {
        memcpy(yDstPlane + row * yDstStride,
               nv12_dst_y + row * nv12_plane1_stride,
               nv12_width);
    }
    uint8_t *uvDstPlane = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(dstPixelBuffer, 1);
    size_t uvDstStride = CVPixelBufferGetBytesPerRowOfPlane(dstPixelBuffer, 1);
    for (int row = 0; row < nv12_hight / 2; row++) {
        memcpy(uvDstPlane + row * uvDstStride,
               nv12_dst_uv + row * nv12_plane1_stride,
               nv12_width);
    }
    CVPixelBufferUnlockBaseAddress(dstPixelBuffer, 0);
    free(nv12_dst_y);
    return dstPixelBuffer;  // +1 reference; caller releases.
}

版权声明:本文内容由互联网用户自发贡献,该文观点仅代表作者本人。本站仅提供信息存储空间服务,不拥有所有权,不承担相关法律责任。如发现本站有涉嫌侵权/违法违规的内容, 请发送邮件至 举报,一经查实,本站将立刻删除。

本文参与 腾讯云自媒体同步曝光计划,分享自作者个人站点/博客。
原始发表:2022年9月24日 ,如有侵权请联系 cloudcommunity@tencent.com 删除

本文分享自 作者个人站点/博客 前往查看

如有侵权,请联系 cloudcommunity@tencent.com 删除。

本文参与 腾讯云自媒体同步曝光计划  ,欢迎热爱写作的你一起参与!

评论
登录后参与评论
0 条评论
热度
最新
推荐阅读
目录
  • 这里有几个注意点:
    • 1.iOS13使用了64字节对齐,也就是步长(每行字节数)是64的倍数。而之前的版本使用的是16字节对齐。
      • 2.使用 libyuv::ConvertToI420 方法时,src_width需要填入步长而不是宽度,因为yuv内部要根据步长来取U、V数据,如果是填入的宽度,那么就会取值位移错误,导致转换失真。
        • 3.因为我是直接NV12数据转换的,所以填写的类型是:libyuv::FOURCC_NV12。应该根据当前数据的类型选择对应的格式。
          • 4.NV12转换为CVPixelBufferRef时,填入对应的步长:nv12_plane1_stride。
          • 一:对NV12裁剪代码如下:
          • 二:CVPixelBufferRef转换为CMSampleBufferRef:
          • 三:对NV12裁剪代码2:
          领券
          问题归档专栏文章快讯文章归档关键词归档开发者手册归档开发者手册 Section 归档