ios – RTCI420Frame对象的图像或纹理

我正在为iOS的WebRTC应用程序工作.我的目标是记录来自WebRTC对象的视频.

我有代理RTCVideoRenderer,它为我提供了这种方法.

// RTCVideoRenderer delegate callback: invoked once per decoded video frame,
// delivering the raw I420 (YUV 4:2:0) frame data.
-(void)renderFrame:(RTCI420Frame *)frame{
}

我的问题是:如何将对象RTCI420Frame转换为有用的对象以显示图像或保存到磁盘.

最佳答案 RTCI420Frames使用YUV420格式.您可以使用OpenCV轻松将它们转换为RGB,然后将它们转换为UIImage.确保 `#import <RTCI420Frame.h>`

// Converts an I420 WebRTC frame to a UIImage using OpenCV.
// I420 stores the Y plane (height rows) followed by the U and V planes
// (chromaHeight rows each), so the single-channel source Mat must be
// (height + chromaHeight) rows tall.
// NOTE(review): this assumes yPlane/uPlane/vPlane are one contiguous,
// tightly packed buffer (no row padding) — confirm against the frame source.
-(void) processFrame:(RTCI420Frame *)frame {
    cv::Mat mYUV((int)frame.height + (int)frame.chromaHeight, (int)frame.width, CV_8UC1, (void *)frame.yPlane);
    // Bug fix: the RGB destination must be 3-channel (CV_8UC3), not CV_8UC1 —
    // CV_YUV2RGB_I420 produces a 3-channel image.
    cv::Mat mRGB((int)frame.height, (int)frame.width, CV_8UC3);
    cvtColor(mYUV, mRGB, CV_YUV2RGB_I420);

    UIImage *image = [self UIImageFromCVMat:mRGB];
    // `image` is intentionally unused in this sample — display or save it here.
}

// Wraps a cv::Mat's pixel buffer in a CGImage and returns it as a UIImage.
// Handles 8-bit mats; 1-channel mats become grayscale, multi-channel become RGB.
-(UIImage *)UIImageFromCVMat:(cv::Mat)cvMat
{
    // Bug fix: elemSize()*total() only describes the pixel data when the Mat
    // is continuous. A view/ROI Mat carries row padding, so force a packed
    // copy before handing the bytes to Core Graphics.
    if (!cvMat.isContinuous()) {
        cvMat = cvMat.clone();
    }

    NSData *data = [NSData dataWithBytes:cvMat.data length:cvMat.elemSize()*cvMat.total()];
    CGColorSpaceRef colorSpace;

    // Pick the color space from the per-pixel byte count.
    if (cvMat.elemSize() == 1) {
        colorSpace = CGColorSpaceCreateDeviceGray();
    } else {
        colorSpace = CGColorSpaceCreateDeviceRGB();
    }

    CGDataProviderRef provider = CGDataProviderCreateWithCFData((__bridge CFDataRef)data);

    // Creating CGImage from cv::Mat. Bits-per-pixel and bytes-per-row come
    // straight from the Mat geometry; no alpha channel is assumed.
    CGImageRef imageRef = CGImageCreate(cvMat.cols,
                                        cvMat.rows,
                                        8,
                                        8 * cvMat.elemSize(),
                                        cvMat.step[0],
                                        colorSpace,
                                        kCGImageAlphaNone|kCGBitmapByteOrderDefault,
                                        provider,
                                        NULL,
                                        false,
                                        kCGRenderingIntentDefault
                                        );


    // Getting UIImage from CGImage; release every CF object we created so
    // repeated per-frame calls don't leak.
    UIImage *finalImage = [UIImage imageWithCGImage:imageRef];
    CGImageRelease(imageRef);
    CGDataProviderRelease(provider);
    CGColorSpaceRelease(colorSpace);

    return finalImage;
}

您可能希望在单独的线程上执行此操作,尤其是在进行任何视频处理时.此外,请记住将文件扩展名改为.mm,以便在Objective-C中使用C++(即Objective-C++).

如果您不想使用OpenCV,可以手动执行.以下代码勉强可以工作,但颜色会错乱,而且运行几秒钟后会崩溃.

// Manual I420 -> RGBA conversion (no OpenCV).
// Fixes vs. the original snippet: correct chroma-plane indexing, clamped
// color math, and releasing the CG objects created on every frame.
int width = (int)frame.width;
int height = (int)frame.height;
// I420: one U and one V sample per 2x2 block of luma pixels.
int chromaWidth = (width + 1) / 2;
// NOTE(review): assumes the planes are tightly packed (stride == width /
// chromaWidth). If the frame exposes per-plane pitches, use those instead.

uint8_t *data = (uint8_t *)malloc(width * height * 4);

const uint8_t* yPlane = frame.yPlane;
const uint8_t* uPlane = frame.uPlane;
const uint8_t* vPlane = frame.vPlane;

for (int row = 0; row < height; row++) {
    for (int col = 0; col < width; col++) {
        int yIndex = row * width + col;
        // Bug fix: the original `i/4` lookup walks the chroma planes in the
        // wrong order and scrambles the colors. Each U/V sample covers a
        // 2x2 pixel block at (row/2, col/2).
        int uvIndex = (row / 2) * chromaWidth + (col / 2);

        int y = yPlane[yIndex];
        int u = uPlane[uvIndex] - 128;
        int v = vPlane[uvIndex] - 128;

        int r = (int)(y + 1.402 * v);
        int g = (int)(y - 0.344 * u - 0.714 * v);
        int b = (int)(y + 1.772 * u);

        int rgbOffset = yIndex * 4;
        // Bug fix: clamp before narrowing — assigning an out-of-range int
        // straight to uint8_t wraps around and corrupts the colors.
        data[rgbOffset]     = (uint8_t)MAX(0, MIN(255, r));
        data[rgbOffset + 1] = (uint8_t)MAX(0, MIN(255, g));
        data[rgbOffset + 2] = (uint8_t)MAX(0, MIN(255, b));
        data[rgbOffset + 3] = UINT8_MAX;
    }
}

CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef gtx = CGBitmapContextCreate(data, width, height, 8, width * 4, colorSpace, kCGImageAlphaPremultipliedLast);
CGImageRef cgImage = CGBitmapContextCreateImage(gtx);
UIImage *uiImage = [[UIImage alloc] initWithCGImage:cgImage];

// Bug fix: the original leaked the CGImage, context and color space on every
// frame — a steady leak that eventually crashes the app ("crashes after a
// few seconds"). UIImage retains the CGImage, so releasing here is safe.
CGImageRelease(cgImage);
CGContextRelease(gtx);
CGColorSpaceRelease(colorSpace);
free(data);
点赞