
How to get AVFrame (ffmpeg) from NSImage/UIImage

I'd like to convert an NSImage/UIImage to an AVFrame (ffmpeg). I found an example: http://lists.mplayerhq.hu/pipermail/libav-user/2010-April/004550.html, but that code doesn't work.

I tried another approach.


  AVFrame *frame = avcodec_alloc_frame();

  int numBytes = avpicture_get_size(PIX_FMT_YUV420P, outputWidth, outputHeight);
  uint8_t *buffer = (uint8_t *)av_malloc(numBytes*sizeof(uint8_t));
  avpicture_fill((AVPicture *)frame, buffer, PIX_FMT_YUV420P, outputWidth, outputHeight);


  //UIImage *image = … something … ;
  NSImage *image = … something … ;

  //CGImageRef newCgImage = image.CGImage;
  CGImageRef newCgImage = [image CGImageForProposedRect:nil context:nil hints:nil];
  //NSBitmapImageRep* bm = [NSBitmapImageRep imageRepWithData:[image TIFFRepresentation]];
  //CGImageRef newCgImage = [bm CGImage];

  size_t w = CGImageGetWidth(newCgImage);
  size_t h = CGImageGetHeight(newCgImage);

  CGDataProviderRef dataProvider = CGImageGetDataProvider(newCgImage);
  CFDataRef bitmapData = CGDataProviderCopyData(dataProvider);
  uint8_t *bitmapBuffer = (uint8_t *)CFDataGetBytePtr(bitmapData);

  // Copy the raw bitmap bytes straight into the Y plane; the U and V
  // planes (frame->data[1] / frame->data[2]) are never written.
  frame->linesize[0] = w;
  int y, x;
  for (y = 0; y < h; y++) {
    for (x = 0; x < w; x++) {
      int z = y * w + x;
      frame->data[0][z] = bitmapBuffer[z];
    }
  }

But this AVFrame gives me a green picture.

Please let me know how I can get this working.

Thanks.

The following is an update.

I tried again, paying attention to the color format. I found an example of converting RGB to YUV: How to perform RGB->YUV conversion in C/C++?

The new code is like this, but it still doesn't work…


#import <Foundation/Foundation.h>
#import <AppKit/AppKit.h>
#import <libavutil/avstring.h>
#import <libavcodec/avcodec.h>
#import <libavformat/avformat.h>
#import <libswscale/swscale.h>

int main(int argc, char *argv[]) {
  NSAutoreleasePool* pool = [[NSAutoreleasePool alloc] init];

  int i;
  int outputWidth  = 480; // must match the size of the input images
  int outputHeight = 360; // must match the size of the input images

  av_register_all();

  AVOutputFormat *format = av_guess_format("mp4", NULL, NULL);
  if(!format) return -1;

  AVFormatContext *outFormatCtx = avformat_alloc_context();
  if(!outFormatCtx) return -1;
  outFormatCtx->oformat  = format;
  av_strlcpy(outFormatCtx->filename, "test.mov", sizeof(outFormatCtx->filename));

  AVStream *vstream = av_new_stream(outFormatCtx, 0);
  if(!vstream) return -1;

  enum CodecID codec_id = av_guess_codec(outFormatCtx->oformat,
                       NULL,
                       outFormatCtx->filename,
                       NULL, CODEC_TYPE_VIDEO);

  AVCodec *ovCodec = avcodec_find_encoder(codec_id);
  if(!ovCodec) return -1;

  AVCodecContext *ovCodecCtx  = vstream->codec;
  ovCodecCtx->codec_id = ovCodec->id;
  ovCodecCtx->codec_type = CODEC_TYPE_VIDEO;
  ovCodecCtx->width = outputWidth;
  ovCodecCtx->height = outputHeight;

  ovCodecCtx->pix_fmt = PIX_FMT_NONE;
  if(ovCodec && ovCodec->pix_fmts){
    // pick a pixel format the encoder supports
    const enum PixelFormat *p = ovCodec->pix_fmts;
    while(*p != PIX_FMT_NONE){
      if(*p == ovCodecCtx->pix_fmt) break;
      p++;
    }
    if(*p == PIX_FMT_NONE) ovCodecCtx->pix_fmt = ovCodec->pix_fmts[0];
  }

  ovCodecCtx->time_base.num = 1;
  ovCodecCtx->time_base.den = 30;

  if(format->flags & AVFMT_GLOBALHEADER)
    ovCodecCtx->flags |= CODEC_FLAG_GLOBAL_HEADER;

  if(avcodec_open(ovCodecCtx, ovCodec) != 0) return -1;

  if (! ( format->flags & AVFMT_NOFILE )) {
    if(url_fopen(&outFormatCtx->pb, outFormatCtx->filename, URL_WRONLY) < 0) return -1;
  }

  av_write_header(outFormatCtx);

  int buf_size = ovCodecCtx->width * ovCodecCtx->height * 4;
  uint8_t *buf = av_malloc(buf_size);

  AVFrame *buffer_frame = avcodec_alloc_frame();
  if(!buffer_frame) return -1;

  AVFrame *frame = avcodec_alloc_frame();
  if(!frame) return -1;

  int numBytes = avpicture_get_size(PIX_FMT_YUV420P, outputWidth, outputHeight);
  uint8_t *buffer = (uint8_t *)av_malloc(numBytes*sizeof(uint8_t));
  avpicture_fill((AVPicture *)frame, buffer, PIX_FMT_YUV420P, outputWidth, outputHeight);

  for(i=1;i<argc;i++){
    NSAutoreleasePool *innerPool = [[NSAutoreleasePool alloc] init];

    NSImage *image = [[NSImage alloc] initWithContentsOfFile:[NSString stringWithCString: argv[i] encoding: NSUTF8StringEncoding]];
    CGImageRef imageRef = [image CGImageForProposedRect:nil context:nil hints:nil];

    size_t w = CGImageGetWidth(imageRef);
    size_t h = CGImageGetHeight(imageRef);
    size_t bytesPerRow = CGImageGetBytesPerRow(imageRef);

    CGDataProviderRef dataProvider = CGImageGetDataProvider(imageRef);
    CFDataRef bitmapData = CGDataProviderCopyData(dataProvider);
    uint8_t *buff = (uint8_t *)CFDataGetBytePtr(bitmapData);


    uint8_t R,G,B,Y,U,V;
    int x,y;
    for(y=0;y<h;y++){
      for(x=0;x<w;x++){
        uint8_t *tmp = buff + y * bytesPerRow + x * 4;
        R = *(tmp + 3);
        G = *(tmp + 2);
        B = *(tmp + 1);

        Y = (0.257 * R) + (0.504 * G) + (0.098 * B) + 16;
        U =  -(0.148 * R) - (0.291 * G) + (0.439 * B) + 128;
        V =  (0.439 * R) - (0.368 * G) - (0.071 * B) + 128;
//printf("y:%d x:%d  R:%d,G:%d,B:%d  Y:%d,U:%d,V:%d \n",y,x,R,G,B,Y,U,V);
        frame->data[0][y*frame->linesize[0]+x]= Y;
        //frame->data[1][y*frame->linesize[1]+x]= U; // uncommenting this gives "Bus error"
        //frame->data[2][y*frame->linesize[2]+x]= V; // (in YUV420P the U/V planes are only (w/2)x(h/2))
      }
    }

    CGImageRelease(imageRef);
    CFRelease(bitmapData);

    int out_size = avcodec_encode_video (ovCodecCtx, buf, buf_size, frame);

    AVPacket outPacket;
    av_init_packet(&outPacket);

    outPacket.stream_index= vstream->index;
    outPacket.data= buf;
    outPacket.size= out_size;

    //outPacket.pts = ?;
    //outPacket.dts = ?;

    if(ovCodecCtx->coded_frame->key_frame)
      outPacket.flags |= PKT_FLAG_KEY;

    if(av_interleaved_write_frame(outFormatCtx, &outPacket) != 0) return -1;

    [image release];

    [innerPool release];
  }

  av_write_trailer(outFormatCtx);
  if (! ( format->flags & AVFMT_NOFILE ))
    if(url_fclose(outFormatCtx->pb) < 0) return -1;

  avcodec_close(vstream->codec);

  for(i = 0; i < outFormatCtx->nb_streams; i++) {
    av_freep(&outFormatCtx->streams[i]->codec);
    av_freep(&outFormatCtx->streams[i]);
  }
  av_freep(&outFormatCtx);

  av_free(buffer);
  av_free(frame);
  av_free(buffer_frame);

  [pool release];
  return 0;
}

And the Makefile is like this.

CC = /usr/bin/gcc 
CFLAGS  = -O4 -Wall -I/usr/local/include
LDFLAGS =
LDLIBS  = -L/usr/local/lib -lavutil -lavformat -lavcodec -lswscale
FRAMEWORK = -framework Foundation -framework AppKit #-framework CoreGraphics

OBJS = test.o 

test: $(OBJS)
    $(CC) -o $@ $(LDFLAGS) $(OBJS) $(LDLIBS) $(FRAMEWORK) -lz  -lbz2 -arch x86_64

Please, somebody, help me.

There is a colorspace mismatch between the data of the CGImage and the destination AVFrame. To fix that, you need to convert the CGImage data (probably in ARGB) into the YUV420 format (FFmpeg has a built-in format converter, libswscale). You can get information about the pixel layout of a CGImage with the CGImageGetBitsPerComponent, CGImageGetBitsPerPixel and CGImageGetBytesPerRow functions.
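For illustration, here is a minimal sketch of that conversion with libswscale, written against the same old FFmpeg API as the code above. It assumes the copied bitmap is 32-bit ARGB with 8 bits per component (check the real layout with CGImageGetAlphaInfo and CGImageGetBitsPerPixel, and pick PIX_FMT_BGRA etc. accordingly), and it reuses the w, h, bytesPerRow, buff and frame variables from the inner loop of the question:

// assumes 8-bit ARGB input; verify with CGImageGetAlphaInfo/CGImageGetBitsPerPixel
struct SwsContext *swsCtx = sws_getContext((int)w, (int)h, PIX_FMT_ARGB,
                                           outputWidth, outputHeight, PIX_FMT_YUV420P,
                                           SWS_BILINEAR, NULL, NULL, NULL);
if (swsCtx) {
  // packed RGB input: a single plane, bytesPerRow bytes per line
  uint8_t *srcData[4]   = { buff, NULL, NULL, NULL };
  int      srcStride[4] = { (int)bytesPerRow, 0, 0, 0 };

  // convert and scale into the YUV420P planes that avpicture_fill
  // already set up in frame->data / frame->linesize
  sws_scale(swsCtx, srcData, srcStride, 0, (int)h, frame->data, frame->linesize);
  sws_freeContext(swsCtx);
}

This would replace the hand-written per-pixel loop entirely; because all three planes get filled, the green frame (empty chroma) and the bus errors (writing full-size coordinates into the quarter-size U/V planes) should both disappear.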
