ijkplayer iOS Initialization


This article is based on ijkplayer version k0.7.9.


Initialization code:
id player = [[IJKFFMoviePlayerController alloc] initWithContentURL:url withOptions:options];
This in turn calls:
- (id)initWithContentURLString:(NSString *)aUrlString
                   withOptions:(IJKFFOptions *)options
{
    if (aUrlString == nil)
        return nil;

    self = [super init];
    if (self) {
        ijkmp_global_init();                                      // mainly FFmpeg initialization; ijkplayer is a wrapper around FFmpeg
        ijkmp_global_set_inject_callback(ijkff_inject_callback);  // register the callback for ijkplayer's inject events
        [IJKFFMoviePlayerController checkIfFFmpegVersionMatch:NO]; // check whether the FFmpeg version matches

        if (options == nil)
            options = [IJKFFOptions optionsByDefault];

        // IJKFFIOStatRegister(IJKFFIOStatDebugCallback);
        // IJKFFIOStatCompleteRegister(IJKFFIOStatCompleteDebugCallback);

        // init fields
        _scalingMode = IJKMPMovieScalingModeAspectFit;
        _shouldAutoplay = YES;
        memset(&_asyncStat, 0, sizeof(_asyncStat));
        memset(&_cacheStat, 0, sizeof(_cacheStat));
        _monitor = [[IJKFFMonitor alloc] init];

        // init media resource
        _urlString = aUrlString;

        // init player (important: creates the FFmpeg-based player that ijkplayer uses on iOS)
        _mediaPlayer = ijkmp_ios_create(media_player_msg_loop);
        _msgPool = [[IJKFFMoviePlayerMessagePool alloc] init];
        IJKWeakHolder *weakHolder = [IJKWeakHolder new];
        weakHolder.object = self;

        // register IJKFFMoviePlayerController with the native player
        ijkmp_set_weak_thiz(_mediaPlayer, (__bridge_retained void *) self);
        ijkmp_set_inject_opaque(_mediaPlayer, (__bridge_retained void *) weakHolder);
        ijkmp_set_ijkio_inject_opaque(_mediaPlayer, (__bridge_retained void *)weakHolder);
        ijkmp_set_option_int(_mediaPlayer, IJKMP_OPT_CATEGORY_PLAYER, "start-on-prepared", _shouldAutoplay ? 1 : 0);

        // init video sink
        _glView = [[IJKSDLGLView alloc] initWithFrame:[[UIScreen mainScreen] bounds]]; // create the OpenGL canvas
        _glView.shouldShowHudView = NO;
        _view   = _glView;
        [_glView setHudValue:nil forKey:@"scheme"];
        [_glView setHudValue:nil forKey:@"host"];
        [_glView setHudValue:nil forKey:@"path"];
        [_glView setHudValue:nil forKey:@"ip"];
        [_glView setHudValue:nil forKey:@"tcp-info"];
        [_glView setHudValue:nil forKey:@"http"];
        [_glView setHudValue:nil forKey:@"tcp-spd"];
        [_glView setHudValue:nil forKey:@"t-prepared"];
        [_glView setHudValue:nil forKey:@"t-render"];
        [_glView setHudValue:nil forKey:@"t-preroll"];
        [_glView setHudValue:nil forKey:@"t-http-open"];
        [_glView setHudValue:nil forKey:@"t-http-seek"];

        self.shouldShowHudView = options.showHudView;

        ijkmp_ios_set_glview(_mediaPlayer, _glView);
        ijkmp_set_option(_mediaPlayer, IJKMP_OPT_CATEGORY_PLAYER, "overlay-format", "fcc-_es2");

#ifdef DEBUG
        [IJKFFMoviePlayerController setLogLevel:k_IJK_LOG_DEBUG];
#else
        [IJKFFMoviePlayerController setLogLevel:k_IJK_LOG_SILENT];
#endif

        // init audio sink
        [[IJKAudioKit sharedInstance] setupAudioSession];

        [options applyTo:_mediaPlayer];
        _pauseInBackground = NO;

        // init extra
        _keepScreenOnWhilePlaying = YES;
        [self setScreenOn:YES];

        _notificationManager = [[IJKNotificationManager alloc] init];
        [self registerApplicationObservers];
    }
    return self;
}

The important inject events are listed below; when one of these events occurs, the corresponding callback is invoked:
AVAPP_EVENT_WILL_HTTP_OPEN: the HTTP connection is about to be opened
AVAPP_CTRL_WILL_TCP_OPEN: the TCP connection is about to be opened
AVAPP_CTRL_DID_TCP_OPEN: the TCP connection has been opened

// NOTE: could be called from multiple thread
static int ijkff_inject_callback(void *opaque, int message, void *data, size_t data_size)
{
IJKWeakHolder *weakHolder = (__bridge IJKWeakHolder *)opaque;
IJKFFMoviePlayerController *mpc = weakHolder.object;
if (!mpc)
return 0;

switch (message) {
    case AVAPP_CTRL_WILL_CONCAT_SEGMENT_OPEN:
        return onInjectIOControl(mpc, mpc.segmentOpenDelegate, message, data, data_size);
    case AVAPP_CTRL_WILL_TCP_OPEN:
        return onInjectTcpIOControl(mpc, mpc.tcpOpenDelegate, message, data, data_size);
    case AVAPP_CTRL_WILL_HTTP_OPEN:
        return onInjectIOControl(mpc, mpc.httpOpenDelegate, message, data, data_size);
    case AVAPP_CTRL_WILL_LIVE_OPEN:
        return onInjectIOControl(mpc, mpc.liveOpenDelegate, message, data, data_size);
    case AVAPP_EVENT_ASYNC_STATISTIC:
        return onInjectAsyncStatistic(mpc, message, data, data_size);
    case IJKIOAPP_EVENT_CACHE_STATISTIC:
        return onInectIJKIOStatistic(mpc, message, data, data_size);
    case AVAPP_CTRL_DID_TCP_OPEN:
        return onInjectTcpIOControl(mpc, mpc.tcpOpenDelegate, message, data, data_size);
    case AVAPP_EVENT_WILL_HTTP_OPEN:
    case AVAPP_EVENT_DID_HTTP_OPEN:
    case AVAPP_EVENT_WILL_HTTP_SEEK:
    case AVAPP_EVENT_DID_HTTP_SEEK:
        return onInjectOnHttpEvent(mpc, message, data, data_size);
    default: {
        return 0;
    }
}

}

// build the default ijkplayer options
+ (IJKFFOptions *)optionsByDefault
{
IJKFFOptions *options = [[IJKFFOptions alloc] init];

[options setPlayerOptionIntValue:30     forKey:@"max-fps"];
[options setPlayerOptionIntValue:0      forKey:@"framedrop"];
[options setPlayerOptionIntValue:3      forKey:@"video-pictq-size"];
[options setPlayerOptionIntValue:0      forKey:@"videotoolbox"];
[options setPlayerOptionIntValue:960    forKey:@"videotoolbox-max-frame-width"];

[options setFormatOptionIntValue:0                  forKey:@"auto_convert"];
[options setFormatOptionIntValue:1                  forKey:@"reconnect"];
[options setFormatOptionIntValue:30 * 1000 * 1000   forKey:@"timeout"];
[options setFormatOptionValue:@"ijkplayer"          forKey:@"user-agent"];

options.showHudView   = NO;

return options;

}
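These defaults can be overridden before the player is created. A minimal sketch (the keys are the same ones used in optionsByDefault above; enabling "videotoolbox" switches video decoding to VideoToolbox hardware decoding, which is discussed further below; url is assumed to be an NSURL pointing at the media):

IJKFFOptions *options = [IJKFFOptions optionsByDefault];
[options setPlayerOptionIntValue:1 forKey:@"videotoolbox"];   // prefer hardware decoding
[options setPlayerOptionIntValue:1 forKey:@"framedrop"];      // allow frame dropping to keep A/V in sync
options.showHudView = YES;                                    // show the debug HUD

IJKFFMoviePlayerController *player =
    [[IJKFFMoviePlayerController alloc] initWithContentURL:url withOptions:options];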

// init player
_mediaPlayer = ijkmp_ios_create(media_player_msg_loop); // important: create the FFmpeg-based player used by ijkplayer on iOS
_msgPool = [[IJKFFMoviePlayerMessagePool alloc] init];  // create the message pool used by ijkplayer's message loop
IJKWeakHolder *weakHolder = [IJKWeakHolder new];
weakHolder.object = self;

ijkmp_set_weak_thiz(_mediaPlayer, (__bridge_retained void *) self);
ijkmp_set_inject_opaque(_mediaPlayer, (__bridge_retained void *) weakHolder);
ijkmp_set_ijkio_inject_opaque(_mediaPlayer, (__bridge_retained void *)weakHolder);
ijkmp_set_option_int(_mediaPlayer, IJKMP_OPT_CATEGORY_PLAYER, "start-on-prepared", _shouldAutoplay ? 1 : 0);

Let's take a look at ijkmp_ios_create:
IjkMediaPlayer *ijkmp_ios_create(int (*msg_loop)(void*))
{
IjkMediaPlayer *mp = ijkmp_create(msg_loop);
if (!mp)
goto fail;

mp->ffplayer->vout = SDL_VoutIos_CreateForGLES2(); // video output is handled through OpenGL ES 2.0
if (!mp->ffplayer->vout)
    goto fail;

mp->ffplayer->pipeline = ffpipeline_create_from_ios(mp->ffplayer); // create the iOS pipeline (audio output and video decoder)
if (!mp->ffplayer->pipeline)
    goto fail;

return mp;

fail:
ijkmp_dec_ref_p(&mp);
return NULL;
}

The video side is straightforward: rendering goes through OpenGL.
Let's look at the audio side separately:
IJKFF_Pipeline *ffpipeline_create_from_ios(FFPlayer *ffp)
{
IJKFF_Pipeline *pipeline = ffpipeline_alloc(&g_pipeline_class, sizeof(IJKFF_Pipeline_Opaque));
if (!pipeline)
return pipeline;

IJKFF_Pipeline_Opaque *opaque     = pipeline->opaque;
opaque->ffp                       = ffp;
pipeline->func_destroy            = func_destroy;

// Note the asymmetry between audio and video here: for video the pipeline opens a video decoder,
// while for audio it opens an audio output (player). Keep this difference in mind.
pipeline->func_open_video_decoder = func_open_video_decoder; // create the video decoder
pipeline->func_open_audio_output  = func_open_audio_output;  // create the audio player

return pipeline;

}
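For context, these function pointers are invoked later through thin wrapper functions in the pipeline layer. A simplified sketch of that dispatch, modeled on the ffpipeline_open_* wrappers (assumption: the real wrappers in ff_ffpipeline.c also guard against NULL pointers):

// sketch: the pipeline layer forwards to whatever platform hooks were installed above
IJKFF_Pipenode *ffpipeline_open_video_decoder(IJKFF_Pipeline *pipeline, FFPlayer *ffp)
{
    return pipeline->func_open_video_decoder(pipeline, ffp);   // func_open_video_decoder from the iOS pipeline
}

SDL_Aout *ffpipeline_open_audio_output(IJKFF_Pipeline *pipeline, FFPlayer *ffp)
{
    return pipeline->func_open_audio_output(pipeline, ffp);    // func_open_audio_output from the iOS pipeline
}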

pipeline->func_open_video_decoder = func_open_video_decoder;
This is where the video decoder is created; the path taken depends on whether hardware decoding is enabled.

ffp->videotoolbox has to be configured before playback starts, for example:
ijkmp_set_option_int(_mediaPlayer, IJKMP_OPT_CATEGORY_PLAYER, "videotoolbox", 1);
static IJKFF_Pipenode *func_open_video_decoder(IJKFF_Pipeline *pipeline, FFPlayer *ffp)
{
    IJKFF_Pipenode* node = NULL;
    IJKFF_Pipeline_Opaque *opaque = pipeline->opaque;

    if (ffp->videotoolbox) {
        // hardware decoding enabled: use VideoToolbox
        node = ffpipenode_create_video_decoder_from_ios_videotoolbox(ffp);
        if (!node)
            ALOGE("vtb fail!!! switch to ffmpeg decode!!!! \n");
    }
    if (node == NULL) {
        // hardware decoding not enabled (or it failed): fall back to FFmpeg software decoding
        node = ffpipenode_create_video_decoder_from_ffplay(ffp);
        ffp->stat.vdec_type = FFP_PROPV_DECODER_AVCODEC;
        opaque->is_videotoolbox_open = false;
    } else {
        ffp->stat.vdec_type = FFP_PROPV_DECODER_VIDEOTOOLBOX;
        opaque->is_videotoolbox_open = true;
    }
    ffp_notify_msg2(ffp, FFP_MSG_VIDEO_DECODER_OPEN, opaque->is_videotoolbox_open);
    return node;
}

The iOS audio player is created here:
pipeline->func_open_audio_output = func_open_audio_output; // create the iOS audio player
static SDL_Aout *func_open_audio_output(IJKFF_Pipeline *pipeline, FFPlayer *ffp)
{
return SDL_AoutIos_CreateForAudioUnit();
}
Through this call the audio player is created as follows:
SDL_Aout *SDL_AoutIos_CreateForAudioUnit()
{
SDL_Aout *aout = SDL_Aout_CreateInternal(sizeof(SDL_Aout_Opaque));
if (!aout)
return NULL;

// SDL_Aout_Opaque *opaque = aout->opaque;
aout->free_l = aout_free_l;
aout->open_audio  = aout_open_audio;
aout->pause_audio = aout_pause_audio;
aout->flush_audio = aout_flush_audio;
aout->close_audio = aout_close_audio;
aout->func_set_playback_rate = aout_set_playback_rate;
aout->func_set_playback_volume = aout_set_playback_volume;
aout->func_get_latency_seconds = auout_get_latency_seconds;
aout->func_get_audio_persecond_callbacks = aout_get_persecond_callbacks;
return aout;

}

Note this line:
aout->open_audio = aout_open_audio; // this is where the audio player gets created
aout_open_audio creates the audio player through IJKSDLAudioQueueController:
static int aout_open_audio(SDL_Aout *aout, const SDL_AudioSpec *desired, SDL_AudioSpec *obtained)
{
assert(desired);
SDLTRACE("aout_open_audio()\n");
SDL_Aout_Opaque *opaque = aout->opaque;

opaque->aoutController = [[IJKSDLAudioQueueController alloc] initWithAudioSpec:desired];
// opaque->aoutController = [[IJKSDLAudioUnitController alloc] initWithAudioSpec:desired];
if (!opaque->aoutController) {
ALOGE("aout_open_audio_n: failed to new AudioTrcak()\n");
return -1;
}

if (obtained)
    *obtained = opaque->aoutController.spec;

return 0;

}
So by default ijkplayer creates an Audio Queue based audio player.
The audio and video paths will be covered separately later.
From the initialization we can already tell that video is rendered with OpenGL and audio is played with Audio Queue.
Let's continue with the init method:
// init video sink
_glView = [[IJKSDLGLView alloc] initWithFrame:[[UIScreen mainScreen] bounds]];
_glView.shouldShowHudView = NO;
_view = _glView;
This sets up the OpenGL render view.
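On the application side this view is exposed as the player's view property and just needs to be added to the app's view hierarchy. A minimal usage sketch (assumes a UIViewController context; prepareToPlay is covered later in this article):

IJKFFMoviePlayerController *player =
    [[IJKFFMoviePlayerController alloc] initWithContentURL:url withOptions:nil]; // nil falls back to optionsByDefault
player.view.frame = self.view.bounds;
player.view.autoresizingMask = UIViewAutoresizingFlexibleWidth | UIViewAutoresizingFlexibleHeight;
[self.view addSubview:player.view];
[player prepareToPlay]; // with shouldAutoplay == YES, playback starts once prepared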

#ifdef DEBUG
    [IJKFFMoviePlayerController setLogLevel:k_IJK_LOG_DEBUG];
#else
    [IJKFFMoviePlayerController setLogLevel:k_IJK_LOG_SILENT];
#endif

This sets ijkplayer's log output level.
The log levels are:
typedef enum IJKLogLevel {
k_IJK_LOG_UNKNOWN = 0,
k_IJK_LOG_DEFAULT = 1,

k_IJK_LOG_VERBOSE = 2,  // very chatty logging
k_IJK_LOG_DEBUG   = 3,
k_IJK_LOG_INFO    = 4,
k_IJK_LOG_WARN    = 5,
k_IJK_LOG_ERROR   = 6,  // ordinary errors
k_IJK_LOG_FATAL   = 7,  // fatal errors
k_IJK_LOG_SILENT  = 8,  // silent: no log output at all

} IJKLogLevel;

+ (void)setLogLevel:(IJKLogLevel)logLevel
{
    ijkmp_global_set_log_level(logLevel);
}

ijkmp_global_set_log_level in turn forwards to ffp_global_set_log_level:

void ffp_global_set_log_level(int log_level)
{
int av_level = log_level_ijk_to_av(log_level);
av_log_set_level(av_level);
}

One thing worth pointing out:
the log level set on ijkplayer is ultimately translated into an FFmpeg log level.
FFmpeg's log levels are:
/**
 * Print no output.
 */
#define AV_LOG_QUIET    -8

/**
 * Something went really wrong and we will crash now.
 */
#define AV_LOG_PANIC     0

/**
 * Something went wrong and recovery is not possible.
 * For example, no header was found for a format which depends
 * on headers or an illegal combination of parameters is used.
 */
#define AV_LOG_FATAL     8

/**
 * Something went wrong and cannot losslessly be recovered.
 * However, not all future data is affected.
 */
#define AV_LOG_ERROR    16

/**
 * Something somehow does not look correct. This may or may not
 * lead to problems. An example would be the use of '-vstrict -2'.
 */
#define AV_LOG_WARNING  24

/**
 * Standard information.
 */
#define AV_LOG_INFO     32

/**
 * Detailed information.
 */
#define AV_LOG_VERBOSE  40

/**
 * Stuff which is only useful for libav* developers.
 */
#define AV_LOG_DEBUG    48

/**
 * Extremely verbose debugging, useful for libav* development.
 */
#define AV_LOG_TRACE    56
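The conversion from the IJKLogLevel values to these FFmpeg levels happens in log_level_ijk_to_av, which ffp_global_set_log_level calls above. A simplified sketch of such a mapping (assumption: written with the k_IJK_LOG_* constants from the enum above for illustration; the exact thresholds live in ijkplayer's native code and may differ in detail):

static int log_level_ijk_to_av(int log_level)
{
    // map ijkplayer's log levels onto FFmpeg's av_log levels
    if (log_level >= k_IJK_LOG_SILENT)  return AV_LOG_QUIET;
    if (log_level >= k_IJK_LOG_FATAL)   return AV_LOG_FATAL;
    if (log_level >= k_IJK_LOG_ERROR)   return AV_LOG_ERROR;
    if (log_level >= k_IJK_LOG_WARN)    return AV_LOG_WARNING;
    if (log_level >= k_IJK_LOG_INFO)    return AV_LOG_INFO;
    if (log_level >= k_IJK_LOG_DEBUG)   return AV_LOG_DEBUG;
    if (log_level >= k_IJK_LOG_VERBOSE) return AV_LOG_VERBOSE;
    return AV_LOG_TRACE;                // k_IJK_LOG_DEFAULT / k_IJK_LOG_UNKNOWN: most verbose
}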

[self registerApplicationObservers];
// this registers observers for a number of system notifications (application lifecycle, etc.)

At this point ijkplayer's initialization is not yet finished: on iOS the caller still has to call
- (void)prepareToPlay;
before playback can start. So what does prepareToPlay actually do?
- (void)prepareToPlay
{
if (!_mediaPlayer)
return;

[self setScreenOn:_keepScreenOnWhilePlaying];

// set the media data source
ijkmp_set_data_source(_mediaPlayer, [_urlString UTF8String]);
ijkmp_set_option(_mediaPlayer, IJKMP_OPT_CATEGORY_FORMAT, "safe", "0"); // for concat demuxer

// elapsed time (ms) since SDL (timer module) initialization, used to measure how long prepare takes
_monitor.prepareStartTick = (int64_t)SDL_GetTickHR();
ijkmp_prepare_async(_mediaPlayer);
}

int ijkmp_set_data_source(IjkMediaPlayer *mp, const char *url)
{
assert(mp);
assert(url);
MPTRACE("ijkmp_set_data_source(url=\"%s\")\n", url);
pthread_mutex_lock(&mp->mutex);
int retval = ijkmp_set_data_source_l(mp, url);
pthread_mutex_unlock(&mp->mutex);
MPTRACE("ijkmp_set_data_source(url=\"%s\")=%d\n", url, retval);
return retval;
}
static int ijkmp_set_data_source_l(IjkMediaPlayer *mp, const char *url)
{
assert(mp);
assert(url);

// MPST_RET_IF_EQ(mp->mp_state, MP_STATE_IDLE);
MPST_RET_IF_EQ(mp->mp_state, MP_STATE_INITIALIZED);
MPST_RET_IF_EQ(mp->mp_state, MP_STATE_ASYNC_PREPARING);
MPST_RET_IF_EQ(mp->mp_state, MP_STATE_PREPARED);
MPST_RET_IF_EQ(mp->mp_state, MP_STATE_STARTED);
MPST_RET_IF_EQ(mp->mp_state, MP_STATE_PAUSED);
MPST_RET_IF_EQ(mp->mp_state, MP_STATE_COMPLETED);
MPST_RET_IF_EQ(mp->mp_state, MP_STATE_STOPPED);
MPST_RET_IF_EQ(mp->mp_state, MP_STATE_ERROR);
MPST_RET_IF_EQ(mp->mp_state, MP_STATE_END);

freep((void**)&mp->data_source);
mp->data_source = strdup(url);
if (!mp->data_source)
    return EIJK_OUT_OF_MEMORY;

ijkmp_change_state_l(mp, MP_STATE_INITIALIZED); // notify the MP_STATE_INITIALIZED state change (signalled via pthread_cond_signal(&cond->id))
return 0;

}
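The MPST_RET_IF_EQ calls above are state guards: setting a data source is rejected if the player is already in any of the listed states (only MP_STATE_IDLE is allowed). Roughly, the macro amounts to the following (assumption: a simplified sketch; the real macro in ijkplayer also logs the offending state, and the error constant shown is part of the sketch):

// sketch: bail out with an error when the player is in a state that forbids the call
#define MPST_RET_IF_EQ(real, expected)      \
    do {                                    \
        if ((real) == (expected))           \
            return EIJK_INVALID_STATE;      \
    } while (0)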

ijkmp_change_state_l(mp, MP_STATE_INITIALIZED);
Inside ijkmp_change_state_l the new state is stored, and an AVMessage (with what = FFP_MSG_PLAYBACK_STATE_CHANGED) is wrapped up and put into ijkplayer->ffplayer->msg_queue:
void ijkmp_change_state_l(IjkMediaPlayer *mp, int new_state)
{
mp->mp_state = new_state;
ffp_notify_msg1(mp->ffplayer, FFP_MSG_PLAYBACK_STATE_CHANGED); // as before, post a FFP_MSG_PLAYBACK_STATE_CHANGED message to msg_queue
}
inline static void ffp_notify_msg1(FFPlayer *ffp, int what) {
msg_queue_put_simple3(&ffp->msg_queue, what, 0, 0);
}
inline static void msg_queue_put_simple3(MessageQueue *q, int what, int arg1, int arg2)
{
AVMessage msg;
msg_init_msg(&msg);
msg.what = what;
msg.arg1 = arg1;
msg.arg2 = arg2;
msg_queue_put(q, &msg);
}
inline static int msg_queue_put(MessageQueue *q, AVMessage *msg)
{
int ret;

SDL_LockMutex(q->mutex);
ret = msg_queue_put_private(q, msg);
SDL_UnlockMutex(q->mutex);

return ret;

}
inline static int msg_queue_put_private(MessageQueue *q, AVMessage *msg)
{
AVMessage *msg1;

if (q->abort_request)
    return -1;

#ifdef FFP_MERGE
msg1 = av_malloc(sizeof(AVMessage));
#else
msg1 = q->recycle_msg;
if (msg1) {
    q->recycle_msg = msg1->next;
    q->recycle_count++;
} else {
    q->alloc_count++;
    msg1 = av_malloc(sizeof(AVMessage));
}
#ifdef FFP_SHOW_MSG_RECYCLE
int total_count = q->recycle_count + q->alloc_count;
if (!(total_count % 10)) {
    av_log(NULL, AV_LOG_DEBUG, "msg-recycle \t%d + \t%d = \t%d\n", q->recycle_count, q->alloc_count, total_count);
}
#endif
#endif

if (!msg1)
    return -1;

*msg1 = *msg;
msg1->next = NULL;

if (!q->last_msg)
    q->first_msg = msg1;
else
    q->last_msg->next = msg1;
q->last_msg = msg1;
q->nb_messages++;
SDL_CondSignal(q->cond);
return 0;

}
int SDL_CondSignal(SDL_cond *cond)
{
assert(cond);
if (!cond)
return -1;

return pthread_cond_signal(&cond->id); // signal the condition variable (a condition lock is used here)

}
cond->id here is actually ijkplayer->ffplayer->msg_queue->cond->id, of type pthread_cond_t.
So what does pthread_cond_signal do?
On Linux (and other POSIX systems), pthread_cond_signal wakes up a thread that is blocked waiting on the condition variable so it can continue running; when the woken thread finds a message in msg_queue, it processes it. So who exactly is this signal sent to? Keep reading.
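The receiver is the message-loop thread, which blocks on that same condition variable inside ijkmp_get_msg / the queue's get function until a message arrives. A simplified sketch of that consumer side (assumption: not the literal ijkplayer implementation, which also handles the recycle list and a non-blocking mode):

// sketch: the classic consumer wait that the message loop is built on
static int msg_queue_get_sketch(MessageQueue *q, AVMessage *out)
{
    int ret = -1;
    SDL_LockMutex(q->mutex);
    while (!q->first_msg && !q->abort_request) {
        SDL_CondWait(q->cond, q->mutex);    // sleeps here until msg_queue_put calls SDL_CondSignal
    }
    if (q->first_msg) {
        AVMessage *msg1 = q->first_msg;     // pop the head of the list (message recycling omitted)
        *out = *msg1;
        q->first_msg = msg1->next;
        if (!q->first_msg)
            q->last_msg = NULL;
        q->nb_messages--;
        ret = 1;
    }
    SDL_UnlockMutex(q->mutex);
    return ret;
}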
Back in prepareToPlay, the method finally calls ijkmp_prepare_async:
ijkmp_prepare_async(_mediaPlayer);
int ijkmp_prepare_async(IjkMediaPlayer *mp)
{
assert(mp);
MPTRACE("ijkmp_prepare_async()\n");
pthread_mutex_lock(&mp->mutex);
int retval = ijkmp_prepare_async_l(mp);
pthread_mutex_unlock(&mp->mutex);
MPTRACE("ijkmp_prepare_async()=%d\n", retval);
return retval;
}
static int ijkmp_prepare_async_l(IjkMediaPlayer *mp)
{
assert(mp);

MPST_RET_IF_EQ(mp->mp_state, MP_STATE_IDLE);
// MPST_RET_IF_EQ(mp->mp_state, MP_STATE_INITIALIZED);
MPST_RET_IF_EQ(mp->mp_state, MP_STATE_ASYNC_PREPARING);
MPST_RET_IF_EQ(mp->mp_state, MP_STATE_PREPARED);
MPST_RET_IF_EQ(mp->mp_state, MP_STATE_STARTED);
MPST_RET_IF_EQ(mp->mp_state, MP_STATE_PAUSED);
MPST_RET_IF_EQ(mp->mp_state, MP_STATE_COMPLETED);
// MPST_RET_IF_EQ(mp->mp_state, MP_STATE_STOPPED);
MPST_RET_IF_EQ(mp->mp_state, MP_STATE_ERROR);
MPST_RET_IF_EQ(mp->mp_state, MP_STATE_END);

assert(mp->data_source);

// switch to the "preparing" state
ijkmp_change_state_l(mp, MP_STATE_ASYNC_PREPARING);

// msg_queue_start starts the message queue
msg_queue_start(&mp->ffplayer->msg_queue);

// released in msg_loop
ijkmp_inc_ref(mp);

// create the message-loop thread; its entry point is ijkmp_msg_loop, which in turn runs
// mp->msg_loop (media_player_msg_loop on iOS). All ijkplayer messages are processed in that thread.
mp->msg_thread = SDL_CreateThreadEx(&mp->_msg_thread, ijkmp_msg_loop, mp, "ff_msg_loop");
// msg_thread is detached inside msg_loop
// TODO: 9 release weak_thiz if pthread_create() failed;

int retval = ffp_prepare_async_l(mp->ffplayer, mp->data_source);
if (retval < 0) {
    ijkmp_change_state_l(mp, MP_STATE_ERROR);
    return retval;
}
return 0;

}

inline static void msg_queue_start(MessageQueue *q)
{
SDL_LockMutex(q->mutex);
q->abort_request = 0;

AVMessage msg;
msg_init_msg(&msg);
msg.what = FFP_MSG_FLUSH; // note: a FFP_MSG_FLUSH message is queued here even though the function is named msg_queue_start(); what does that mean?
msg_queue_put_private(q, &msg);

SDL_UnlockMutex(q->mutex);

}

int ffp_prepare_async_l(FFPlayer *ffp, const char *file_name)
{
assert(ffp);
assert(!ffp->is);
assert(file_name);

if (av_stristart(file_name, "rtmp", NULL) ||
    av_stristart(file_name, "rtsp", NULL)) {
    // There is total different meaning for 'timeout' option in rtmp
    av_log(ffp, AV_LOG_WARNING, "remove 'timeout' option for rtmp.\n");
    av_dict_set(&ffp->format_opts, "timeout", NULL, 0);
}

/* there is a length limit in avformat */
if (strlen(file_name) + 1 > 1024) {
    av_log(ffp, AV_LOG_ERROR, "%s too long url\n", __func__);
    if (avio_find_protocol_name("ijklongurl:")) {
        av_dict_set(&ffp->format_opts, "ijklongurl-url", file_name, 0);
        file_name = "ijklongurl:";
    }
}

av_log(NULL, AV_LOG_INFO, "===== versions =====\n");
ffp_show_version_str(ffp, "ijkplayer",      ijk_version_info());
ffp_show_version_str(ffp, "FFmpeg",         av_version_info());
ffp_show_version_int(ffp, "libavutil",      avutil_version());
ffp_show_version_int(ffp, "libavcodec",     avcodec_version());
ffp_show_version_int(ffp, "libavformat",    avformat_version());
ffp_show_version_int(ffp, "libswscale",     swscale_version());
ffp_show_version_int(ffp, "libswresample",  swresample_version());
av_log(NULL, AV_LOG_INFO, "===== options =====\n");
ffp_show_dict(ffp, "player-opts", ffp->player_opts);
ffp_show_dict(ffp, "format-opts", ffp->format_opts);
ffp_show_dict(ffp, "codec-opts ", ffp->codec_opts);
ffp_show_dict(ffp, "sws-opts   ", ffp->sws_dict);
ffp_show_dict(ffp, "swr-opts   ", ffp->swr_opts);
av_log(NULL, AV_LOG_INFO, "===================\n");

av_opt_set_dict(ffp, &ffp->player_opts);

if (!ffp->aout) {
    ffp->aout = ffpipeline_open_audio_output(ffp->pipeline, ffp);
    if (!ffp->aout)
        return -1;
}

#if CONFIG_AVFILTER
if (ffp->vfilter0) {
    GROW_ARRAY(ffp->vfilters_list, ffp->nb_vfilters);
    ffp->vfilters_list[ffp->nb_vfilters - 1] = ffp->vfilter0;
}
#endif

VideoState *is = stream_open(ffp, file_name, NULL);
if (!is) {
    av_log(NULL, AV_LOG_WARNING, "ffp_prepare_async_l: stream_open failed OOM");
    return EIJK_OUT_OF_MEMORY;
}

ffp->is = is;
ffp->input_filename = av_strdup(file_name);
return 0;

}

ffp->aout = ffpipeline_open_audio_output(ffp->pipeline, ffp); // open the audio output
VideoState *is = stream_open(ffp, file_name, NULL);
Inside stream_open, two threads are created:
is->video_refresh_tid = SDL_CreateThreadEx(&is->_video_refresh_tid, video_refresh_thread, ffp, "ff_vout"); // the video refresh (display) thread
is->read_tid = SDL_CreateThreadEx(&is->_read_tid, read_thread, ffp, "ff_read"); // the read thread; read_thread reads the media data from the network or a local file
Inside read_thread another message is posted to msg_queue: ffp_notify_msg1(ffp, FFP_MSG_PREPARED);.
When the message-loop thread receives this message:
// this is the message-loop handler function
int media_player_msg_loop(void* arg)
{
@autoreleasepool {
IjkMediaPlayer *mp = (IjkMediaPlayer *)arg;
__weak IJKFFMoviePlayerController *ffpController = ffplayerRetain(ijkmp_set_weak_thiz(mp, NULL));
while (ffpController) {
@autoreleasepool {
IJKFFMoviePlayerMessage *msg = [ffpController obtainMessage];
if (!msg)
break;

            int retval = ijkmp_get_msg(mp, &msg->_msg, 1);
            if (retval < 0)
                break;

            // block-get should never return 0
            assert(retval > 0);

            [ffpController performSelectorOnMainThread:@selector(postEvent:) withObject:msg waitUntilDone:NO];
        }
    }

    // retained in prepare_async, before SDL_CreateThreadEx
    ijkmp_dec_ref_p(&mp);
    return 0;
    }

}

The FFP_MSG_PREPARED message is first handled inside the ijkmp_get_msg function:
int retval = ijkmp_get_msg(mp, &msg->_msg, 1);
case FFP_MSG_PREPARED:
    MPTRACE("ijkmp_get_msg: FFP_MSG_PREPARED\n");
    pthread_mutex_lock(&mp->mutex);
    if (mp->mp_state == MP_STATE_ASYNC_PREPARING) {
        ijkmp_change_state_l(mp, MP_STATE_PREPARED);
    } else {
        // FIXME: 1: onError() ?
        av_log(mp->ffplayer, AV_LOG_DEBUG, "FFP_MSG_PREPARED: expecting mp_state==MP_STATE_ASYNC_PREPARING\n");
    }
    if (ffp_is_paused_l(mp->ffplayer)) {
        ijkmp_change_state_l(mp, MP_STATE_PAUSED);
    }
    pthread_mutex_unlock(&mp->mutex);
    break;

// if the player's current state is MP_STATE_ASYNC_PREPARING, switch it to MP_STATE_PREPARED
ijkmp_change_state_l(mp, MP_STATE_PREPARED);
The MP_STATE_PREPARED state is then reflected elsewhere, for example in the playbackState getter:
- (IJKMPMoviePlaybackState)playbackState
{
if (!_mediaPlayer)
return NO;

IJKMPMoviePlaybackState mpState = IJKMPMoviePlaybackStateStopped;
int state = ijkmp_get_state(_mediaPlayer);
switch (state) {
    case MP_STATE_STOPPED:
    case MP_STATE_COMPLETED:
    case MP_STATE_ERROR:
    case MP_STATE_END:
        mpState = IJKMPMoviePlaybackStateStopped;
        break;
    case MP_STATE_IDLE:
    case MP_STATE_INITIALIZED:
    case MP_STATE_ASYNC_PREPARING:
    case MP_STATE_PAUSED:
        mpState = IJKMPMoviePlaybackStatePaused;
        break;
    case MP_STATE_PREPARED:
    case MP_STATE_STARTED: {
        if (_seeking)
            mpState = IJKMPMoviePlaybackStateSeekingForward;
        else
            mpState = IJKMPMoviePlaybackStatePlaying;
        break;
    }
}
// IJKMPMoviePlaybackStatePlaying,
// IJKMPMoviePlaybackStatePaused,
// IJKMPMoviePlaybackStateStopped,
// IJKMPMoviePlaybackStateInterrupted,
// IJKMPMoviePlaybackStateSeekingForward,
// IJKMPMoviePlaybackStateSeekingBackward
return mpState;

}

After ijkmp_get_msg has processed it, the FFP_MSG_PREPARED message is delivered to
- (void)postEvent:(IJKFFMoviePlayerMessage *)msg
for further handling. Inside postEvent, FFP_MSG_PREPARED is handled like this:
case FFP_MSG_PREPARED: {
    NSLog(@"FFP_MSG_PREPARED:\n");

    _monitor.prepareDuration = (int64_t)SDL_GetTickHR() - _monitor.prepareStartTick;
    int64_t vdec = ijkmp_get_property_int64(_mediaPlayer, FFP_PROP_INT64_VIDEO_DECODER, FFP_PROPV_DECODER_UNKNOWN);
    switch (vdec) {
        case FFP_PROPV_DECODER_VIDEOTOOLBOX:
            _monitor.vdecoder = @"VideoToolbox";
            break;
        case FFP_PROPV_DECODER_AVCODEC:
            _monitor.vdecoder = [NSString stringWithFormat:@"avcodec %d.%d.%d",
                                 LIBAVCODEC_VERSION_MAJOR,
                                 LIBAVCODEC_VERSION_MINOR,
                                 LIBAVCODEC_VERSION_MICRO];
            break;
        default:
            _monitor.vdecoder = @"Unknown";
            break;
    }

    IjkMediaMeta *rawMeta = ijkmp_get_meta_l(_mediaPlayer);
    if (rawMeta) {
        ijkmeta_lock(rawMeta);

        NSMutableDictionary *newMediaMeta = [[NSMutableDictionary alloc] init];

        fillMetaInternal(newMediaMeta, rawMeta, IJKM_KEY_FORMAT, nil);
        fillMetaInternal(newMediaMeta, rawMeta, IJKM_KEY_DURATION_US, nil);
        fillMetaInternal(newMediaMeta, rawMeta, IJKM_KEY_START_US, nil);
        fillMetaInternal(newMediaMeta, rawMeta, IJKM_KEY_BITRATE, nil);
        fillMetaInternal(newMediaMeta, rawMeta, IJKM_KEY_VIDEO_STREAM, nil);
        fillMetaInternal(newMediaMeta, rawMeta, IJKM_KEY_AUDIO_STREAM, nil);

        int64_t video_stream = ijkmeta_get_int64_l(rawMeta, IJKM_KEY_VIDEO_STREAM, -1);
        int64_t audio_stream = ijkmeta_get_int64_l(rawMeta, IJKM_KEY_AUDIO_STREAM, -1);

        NSMutableArray *streams = [[NSMutableArray alloc] init];

        size_t count = ijkmeta_get_children_count_l(rawMeta);
        for (size_t i = 0; i < count; ++i) {
            IjkMediaMeta *streamRawMeta = ijkmeta_get_child_l(rawMeta, i);
            NSMutableDictionary *streamMeta = [[NSMutableDictionary alloc] init];

            if (streamRawMeta) {
                fillMetaInternal(streamMeta, streamRawMeta, IJKM_KEY_TYPE, k_IJKM_VAL_TYPE__UNKNOWN);
                const char *type = ijkmeta_get_string_l(streamRawMeta, IJKM_KEY_TYPE);
                if (type) {
                    fillMetaInternal(streamMeta, streamRawMeta, IJKM_KEY_CODEC_NAME, nil);
                    fillMetaInternal(streamMeta, streamRawMeta, IJKM_KEY_CODEC_PROFILE, nil);
                    fillMetaInternal(streamMeta, streamRawMeta, IJKM_KEY_CODEC_LONG_NAME, nil);
                    fillMetaInternal(streamMeta, streamRawMeta, IJKM_KEY_BITRATE, nil);

                    if (0 == strcmp(type, IJKM_VAL_TYPE__VIDEO)) {
                        fillMetaInternal(streamMeta, streamRawMeta, IJKM_KEY_WIDTH, nil);
                        fillMetaInternal(streamMeta, streamRawMeta, IJKM_KEY_HEIGHT, nil);
                        fillMetaInternal(streamMeta, streamRawMeta, IJKM_KEY_FPS_NUM, nil);
                        fillMetaInternal(streamMeta, streamRawMeta, IJKM_KEY_FPS_DEN, nil);
                        fillMetaInternal(streamMeta, streamRawMeta, IJKM_KEY_TBR_NUM, nil);
                        fillMetaInternal(streamMeta, streamRawMeta, IJKM_KEY_TBR_DEN, nil);
                        fillMetaInternal(streamMeta, streamRawMeta, IJKM_KEY_SAR_NUM, nil);
                        fillMetaInternal(streamMeta, streamRawMeta, IJKM_KEY_SAR_DEN, nil);

                        if (video_stream == i) {
                            _monitor.videoMeta = streamMeta;

                            int64_t fps_num = ijkmeta_get_int64_l(streamRawMeta, IJKM_KEY_FPS_NUM, 0);
                            int64_t fps_den = ijkmeta_get_int64_l(streamRawMeta, IJKM_KEY_FPS_DEN, 0);
                            if (fps_num > 0 && fps_den > 0) {
                                _fpsInMeta = ((CGFloat)(fps_num)) / fps_den;
                                NSLog(@"fps in meta %f\n", _fpsInMeta);
                            }
                        }
                    } else if (0 == strcmp(type, IJKM_VAL_TYPE__AUDIO)) {
                        fillMetaInternal(streamMeta, streamRawMeta, IJKM_KEY_SAMPLE_RATE, nil);
                        fillMetaInternal(streamMeta, streamRawMeta, IJKM_KEY_CHANNEL_LAYOUT, nil);

                        if (audio_stream == i) {
                            _monitor.audioMeta = streamMeta;
                        }
                    }
                }
            }

            [streams addObject:streamMeta];
        }

        [newMediaMeta setObject:streams forKey:kk_IJKM_KEY_STREAMS];

        ijkmeta_unlock(rawMeta);
        _monitor.mediaMeta = newMediaMeta;
    }

    ijkmp_set_playback_rate(_mediaPlayer, [self playbackRate]);
    ijkmp_set_playback_volume(_mediaPlayer, [self playbackVolume]);

    [self startHudTimer];

    _isPreparedToPlay = YES;

    [[NSNotificationCenter defaultCenter] postNotificationName:IJKMPMediaPlaybackIsPreparedToPlayDidChangeNotification object:self];
    _loadState = IJKMPMovieLoadStatePlayable | IJKMPMovieLoadStatePlaythroughOK;

    [[NSNotificationCenter defaultCenter]
     postNotificationName:IJKMPMoviePlayerLoadStateDidChangeNotification
     object:self];

    break;
}

In other words, postEvent sets the playback rate and volume:
ijkmp_set_playback_rate(_mediaPlayer, [self playbackRate]);
ijkmp_set_playback_volume(_mediaPlayer, [self playbackVolume]);
starts the HUD timer:
[self startHudTimer];
and posts the notifications:
[[NSNotificationCenter defaultCenter] postNotificationName:IJKMPMediaPlaybackIsPreparedToPlayDidChangeNotification object:self];
_loadState = IJKMPMovieLoadStatePlayable | IJKMPMovieLoadStatePlaythroughOK;
[[NSNotificationCenter defaultCenter] postNotificationName:IJKMPMoviePlayerLoadStateDidChangeNotification object:self];

These are the IJKFFMoviePlayerController player notifications.
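On the application side these notifications are observed through NSNotificationCenter. A minimal sketch (handlePreparedToPlay: and handleLoadStateChange: are hypothetical handler names in the host app; player is the IJKFFMoviePlayerController instance):

[[NSNotificationCenter defaultCenter] addObserver:self
                                         selector:@selector(handlePreparedToPlay:)
                                             name:IJKMPMediaPlaybackIsPreparedToPlayDidChangeNotification
                                           object:player];
[[NSNotificationCenter defaultCenter] addObserver:self
                                         selector:@selector(handleLoadStateChange:)
                                             name:IJKMPMoviePlayerLoadStateDidChangeNotification
                                           object:player];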
