(關於FFmpeg的編譯包,使用的是3.2.2,可能不同編譯包API部分有變化;下載地址:後續上傳)
視頻採集後經過硬件編碼為H264,然後通過網絡(RTSP、RTMP)協議傳輸。
當我們得到H264編碼的數據後需要進行解碼,從而將數據還原,進行播放。
iPhone是有硬件解碼的,但是並沒有提供相關API,所以我們沒有辦法進行硬件解碼,只能通過軟件解碼——ffmpeg。
得到YUV格式的數據後有兩種處理方案:
本文介紹如何解碼;通過OpenGL顯示,下一篇文章會有介紹。
解碼H264 Demo結構、注意FFmpeg所需的依賴庫。定義YUV結構,新建DecodeH264Data_YUV.h文件//
// H264DecoderDemo
//
// Created by xoxo_X on 2017/2/11.
// Copyright © 2017年 xoxo_X. All rights reserved.
//
#ifndef _DECODEH264DATA_YUV_
#define _DECODEH264DATA_YUV_
// 1-byte packing: these structs describe raw frame buffers handed across
// module boundaries, so the layout must be exact and padding-free.
#pragma pack(push, 1)
// One plane of decoded video: a malloc'd byte buffer and its length in bytes.
typedef struct H264FrameDef
{
unsigned int length;
unsigned char* dataBuffer;
}H264Frame;
// One complete decoded YUV 4:2:0 picture: the luma (Y) plane plus the two
// chroma planes (Cb in chromaB, Cr in chromaR), each half width/height.
typedef struct H264YUVDef
{
unsigned int width;
unsigned int height;
H264Frame luma;
H264Frame chromaB;
H264Frame chromaR;
}H264YUV_Frame;
#pragma pack(pop)
#endif
解碼.m文件
//
// H264Decoder.m
// H264DecoderDemo
//
// Created by 馮士魁 on 2017/2/11.
// Copyright © 2017年 馮士魁. All rights reserved.
//
#import "H264Decoder.h"
@implementation H264Decoder

@synthesize updateDelegate;

// Designated initializer. Registers the FFmpeg codecs, looks up the H.264
// software decoder, allocates/opens its context, and allocates the reusable
// output frame.
- (id)init{
    if(self = [super init]){
        pCodec = NULL;
        pCodecCtx = NULL;
        pVideoFrame = NULL;
        pictureWidth = 0;

        av_register_all();
        avcodec_register_all();

        pCodec = avcodec_find_decoder(AV_CODEC_ID_H264);
        if (!pCodec) {
            printf("Codec not find\n");
        }
        pCodecCtx = avcodec_alloc_context3(pCodec);
        if (!pCodecCtx) {
            printf("alloc codec context error\n");
        }
        // FIX: check the result — avcodec_open2 can fail, and decoding with
        // an unopened context is invalid.
        if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0) {
            printf("open codec error\n");
        }
        pVideoFrame = av_frame_alloc();
    }
    return self;
}

- (void)dealloc{
    // FIX: the original conditions were inverted (`if (!pCodecCtx)` /
    // `if (!pVideoFrame)`), releasing the decoder only when the pointers were
    // already NULL — i.e. never — so the context and frame always leaked.
    if (pCodecCtx) {
        avcodec_close(pCodecCtx);
        avcodec_free_context(&pCodecCtx);  // closing does not free the context itself
    }
    if (pVideoFrame) {
        av_frame_free(&pVideoFrame);
    }
    [super dealloc];  // this file is MRC (assign delegate, explicit dealloc)
}

// Decodes one buffer of H.264 bitstream data. When a full picture is
// produced, its Y/Cb/Cr planes are copied (with FFmpeg row padding stripped)
// into a temporary H264YUV_Frame and handed to the delegate on the main
// thread. Returns 0 normally, -1 on decode error or when the frame was
// dropped because the picture size changed mid-stream.
// FIX: selector renamed from `DecodeH264Frames:withLength:` to match the
// declaration in H264Decoder.h (`decodeH264Frames:withLength:`); the old
// capitalized name raised "unrecognized selector" for header-based callers.
- (int)decodeH264Frames:(unsigned char*)inputBuffer withLength:(int)aLength
{
    int gotPicPtr = 0;
    int result = 0;

    // FIX: the ivar `pAvPackage` is declared `AVPacket *` in the header but
    // was used as if it were an AVPacket value (`av_init_packet(&pAvPackage)`)
    // and was never allocated. A stack-local packet is the correct,
    // allocation-free pattern for a single decode call.
    AVPacket package;
    av_init_packet(&package);
    package.data = (unsigned char*)inputBuffer;
    package.size = aLength;

    result = avcodec_decode_video2(pCodecCtx, pVideoFrame, &gotPicPtr, &package);
    if (result < 0) {
        av_free_packet(&package);  // FIX: the error path must still release the packet
        return -1;
    }

    // Drop this frame when the stream dimensions change mid-stream.
    if ((pictureWidth != 0) && (pictureWidth != pCodecCtx->width)) {
        pictureWidth = pCodecCtx->width;
        av_free_packet(&package);  // FIX: this early return leaked the packet
        return -1;
    }
    // FIX: the original never stored the first width, so pictureWidth stayed
    // 0 forever and the size-change check above could never trigger.
    pictureWidth = pCodecCtx->width;

    if (gotPicPtr)
    {
        // YUV 4:2:0: chroma planes are half the luma width and height.
        // MIN(linesize, width) strips any row padding FFmpeg added.
        unsigned int lumaLength   = (pCodecCtx->height) * (MIN(pVideoFrame->linesize[0], pCodecCtx->width));
        unsigned int chromBLength = ((pCodecCtx->height) / 2) * (MIN(pVideoFrame->linesize[1], (pCodecCtx->width) / 2));
        unsigned int chromRLength = ((pCodecCtx->height) / 2) * (MIN(pVideoFrame->linesize[2], (pCodecCtx->width) / 2));

        H264YUV_Frame yuvFrame;
        memset(&yuvFrame, 0, sizeof(H264YUV_Frame));

        yuvFrame.width  = pCodecCtx->width;
        yuvFrame.height = pCodecCtx->height;
        yuvFrame.luma.length    = lumaLength;
        yuvFrame.chromaB.length = chromBLength;
        yuvFrame.chromaR.length = chromRLength;
        yuvFrame.luma.dataBuffer    = (unsigned char*)malloc(lumaLength);
        yuvFrame.chromaB.dataBuffer = (unsigned char*)malloc(chromBLength);
        yuvFrame.chromaR.dataBuffer = (unsigned char*)malloc(chromRLength);

        // FIX: guard against malloc failure before copying into the buffers.
        if (yuvFrame.luma.dataBuffer && yuvFrame.chromaB.dataBuffer && yuvFrame.chromaR.dataBuffer) {
            copyDecodedFrame(pVideoFrame->data[0], yuvFrame.luma.dataBuffer,
                             pVideoFrame->linesize[0], pCodecCtx->width, pCodecCtx->height);
            copyDecodedFrame(pVideoFrame->data[1], yuvFrame.chromaB.dataBuffer,
                             pVideoFrame->linesize[1], pCodecCtx->width / 2, pCodecCtx->height / 2);
            copyDecodedFrame(pVideoFrame->data[2], yuvFrame.chromaR.dataBuffer,
                             pVideoFrame->linesize[2], pCodecCtx->width / 2, pCodecCtx->height / 2);

            // FIX: dispatch_sync from the main queue onto the main queue
            // deadlocks; call the delegate directly when already on main.
            // A synchronous hop (not async) is required because yuvFrame and
            // its plane buffers live on this stack frame and are freed below.
            if ([NSThread isMainThread]) {
                [self updateYUVFrameOnMainThread:&yuvFrame];
            } else {
                dispatch_sync(dispatch_get_main_queue(), ^{
                    [self updateYUVFrameOnMainThread:&yuvFrame];
                });
            }
        }

        // free(NULL) is a no-op, so the malloc-failure path is safe here too.
        free(yuvFrame.luma.dataBuffer);
        free(yuvFrame.chromaB.dataBuffer);
        free(yuvFrame.chromaR.dataBuffer);
    }
    av_free_packet(&package);
    return 0;
}

// Copies `height` rows of `width` bytes each from decoded plane `src`
// (whose stride is `linesize`, possibly wider than `width` due to padding)
// into the tightly packed destination buffer `dist`.
void copyDecodedFrame(unsigned char *src, unsigned char *dist, int linesize, int width, int height)
{
    width = MIN(linesize, width);
    for (int i = 0; i < height; ++i) {
        memcpy(dist, src, width);
        dist += width;
        src  += linesize;
    }
}

// Forwards one decoded frame to the delegate. Expected to run on the main
// thread; the optional delegate method is checked before being invoked.
- (void)updateYUVFrameOnMainThread:(H264YUV_Frame*)yuvFrame
{
    if (yuvFrame != NULL) {
        if ([self.updateDelegate respondsToSelector:@selector(updateDecodedH264FrameData:)]) {
            [self.updateDelegate updateDecodedH264FrameData:yuvFrame];
        }
    }
}

@end
解碼.h文件
//
// H264Decoder.h 解碼
// H264DecoderDemo
//
// Created by 馮士魁 on 2017/2/11.
// Copyright © 2017年 馮士魁. All rights reserved.
//
#import <Foundation/Foundation.h>
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libswscale/swscale.h>
#import "DecodeH264Data_YUV.h"
// Delegate protocol through which H264Decoder hands out decoded pictures.
@protocol updateDecodedH264FrameDelegate <NSObject>
@optional
// Called on the main thread once per decoded picture. The frame's plane
// buffers are freed by the decoder right after this call returns, so copy
// any data you need to keep past this call.
- (void)updateDecodedH264FrameData:(H264YUV_Frame *)yuvFrame;
@end
// H.264 software decoder built on FFmpeg's libavcodec. Feed raw H.264
// bitstream buffers to -decodeH264Frames:withLength:; decoded YUV 4:2:0
// pictures are delivered to `updateDelegate` on the main thread.
@interface H264Decoder : NSObject{
    int pictureWidth;           // width of the stream; used to detect mid-stream size changes
    AVCodec *pCodec;            // the H.264 decoder
    AVCodecContext *pCodecCtx;  // decoder state/context
    AVFrame *pVideoFrame;       // reusable decoded-frame storage
    AVPacket *pAvPackage;       // NOTE(review): declared but never allocated by the
                                // implementation; a stack-local AVPacket should be
                                // used in the decode method instead
}
// Receiver of decoded frames. `assign` (not weak) because this file predates
// ARC (the implementation calls [super dealloc]); callers must clear the
// delegate before it is deallocated.
@property (nonatomic,assign)id<updateDecodedH264FrameDelegate>updateDelegate;
/// Designated initializer; sets up the FFmpeg H.264 decoder.
/// (FIX: declared as instancetype rather than id — backward compatible.)
- (instancetype)init;
/// Decodes one buffer of raw H.264 bitstream data.
/// @param inputBuffer pointer to the H.264 bytes to decode
/// @param aLength     number of valid bytes in inputBuffer
/// @return 0 on success, -1 when decoding failed or the frame was dropped
-(int)decodeH264Frames:(unsigned char *)inputBuffer withLength:(int)aLength;
@end
【ios 解碼H264為YUV格式】的相關資料介紹到這裡,希望對您有所幫助! 提示:不會對讀者因本文所帶來的任何損失負責。如果您支持就請把本站添加至收藏夾哦!