从本文开始讲解 ijkplayer 相关的,本篇主要讲播放器从初始化到 onPrepared
回调回来之间的操作
public final class IjkMediaPlayer extends AbstractMediaPlayer {

    /**
     * Default library loader.
     * Load the libraries yourself if they are not installed at the default place.
     */
    private static final IjkLibLoader sLocalLibLoader = new IjkLibLoader() {
        @Override
        public void loadLibrary(String libName) throws UnsatisfiedLinkError, SecurityException {
            // Delegate to the system loader, which searches the app's default native-lib dirs.
            System.loadLibrary(libName);
        }
    };

    /**
     * Default constructor. Consider using one of the create() methods for
     * synchronously instantiating a IjkMediaPlayer from a Uri or resource.
     * <p>
     * When done with the IjkMediaPlayer, you should call {@link #release()}, to
     * free the resources. If not released, too many IjkMediaPlayer instances
     * may result in an exception.
     * </p>
     */
    public IjkMediaPlayer() {
        this(sLocalLibLoader);
    }

    /**
     * Constructor that lets the caller control native library loading.
     *
     * @param libLoader custom library loader, can be null (falls back to the default loader).
     */
    public IjkMediaPlayer(IjkLibLoader libLoader) {
        initPlayer(libLoader);
    }

    // One-time per-instance setup: load native libs, run native_init, pick a Looper
    // for event delivery, then hand a weak reference of this object to native code.
    private void initPlayer(IjkLibLoader libLoader) {
        loadLibrariesOnce(libLoader); // loads the three .so files, once per process
        initNativeOnce();             // calls native_init(), once per process

        // Prefer the calling thread's Looper so callbacks land on the creating thread;
        // fall back to the main looper, or null if neither is available.
        Looper looper;
        if ((looper = Looper.myLooper()) != null) {
            mEventHandler = new EventHandler(this, looper);
        } else if ((looper = Looper.getMainLooper()) != null) {
            mEventHandler = new EventHandler(this, looper);
        } else {
            mEventHandler = null;
        }

        /*
         * Native setup requires a weak reference to our object. It's easier to
         * create it here than in C++.
         */
        native_setup(new WeakReference<IjkMediaPlayer>(this));
    }
}
在构造方法的 initPlayer()
方法中,依次调用了 loadLibrariesOnce()
、 initNativeOnce()
、 创建 Looper 和 native_setup
public final class IjkMediaPlayer extends AbstractMediaPlayer {

    // Flipped to true after the native libraries are loaded; volatile for
    // cross-thread visibility (writes are additionally guarded by the class lock).
    private static volatile boolean mIsLibLoaded = false;

    /**
     * Loads ijkplayer's three native libraries exactly once per process.
     *
     * @param libLoader loader to use; null falls back to {@code sLocalLibLoader}
     *                  (plain {@code System.loadLibrary}).
     */
    public static void loadLibrariesOnce(IjkLibLoader libLoader) {
        synchronized (IjkMediaPlayer.class) {
            if (!mIsLibLoaded) {
                if (libLoader == null)
                    libLoader = sLocalLibLoader;
                // NOTE(review): load order presumably matters (ijksdl/ijkplayer
                // link against ijkffmpeg symbols) — confirm before reordering.
                libLoader.loadLibrary("ijkffmpeg");
                libLoader.loadLibrary("ijksdl");
                libLoader.loadLibrary("ijkplayer");
                mIsLibLoaded = true;
            }
        }
    }
}
loadLibrariesOnce()
中通过调用 IjkLibLoader 的 loadLibrary()
方法来加载 ijkplayer 的三个 so 库,最终是通过调用 System.loadLibrary()
来加载的 so
同时在 native 层与 java 方法的对应是通过 JNI_OnLoad()
来完成注册的,卸载时会调用 JNI_OnUnload()
/*
 * Called by the VM when the ijkplayer .so is loaded via System.loadLibrary().
 * Caches the JavaVM, registers the native method table against the Java
 * IjkMediaPlayer class, and runs the global init hooks.
 * Returns the JNI version on success, -1 if GetEnv fails.
 */
JNIEXPORT jint JNI_OnLoad(JavaVM *vm, void *reserved)
{
    JNIEnv* env = NULL;

    g_jvm = vm;  // cached so native threads can attach later
    if ((*vm)->GetEnv(vm, (void**) &env, JNI_VERSION_1_4) != JNI_OK) {
        return -1;
    }
    assert(env != NULL);

    pthread_mutex_init(&g_clazz.mutex, NULL );

    // FindClass returns LocalReference
    IJK_FIND_JAVA_CLASS(env, g_clazz.clazz, JNI_CLASS_IJKPLAYER);
    // Bind every entry of g_methods to the Java-side native declarations.
    (*env)->RegisterNatives(env, g_clazz.clazz, g_methods, NELEM(g_methods) );

    ijkmp_global_init();                                  // ffmpeg/global setup
    ijkmp_global_set_inject_callback(inject_callback);    // install inject hook
    FFmpegApi_global_init(env);                           // register FFmpegApi natives

    return JNI_VERSION_1_4;
}

/* Called when the library is unloaded: tear down globals and the class mutex. */
JNIEXPORT void JNI_OnUnload(JavaVM *jvm, void *reserved)
{
    ijkmp_global_uninit();
    pthread_mutex_destroy(&g_clazz.mutex);
}
首先 RegisterNatives
注册 g_methods
中的 native 方法
/*
 * JNI method table: maps each Java `native` method of IjkMediaPlayer
 * (name + JNI signature) to its C implementation. Registered in JNI_OnLoad
 * via RegisterNatives.
 */
static JNINativeMethod g_methods[] = {
    { "_setDataSource", "(Ljava/lang/String;[Ljava/lang/String;[Ljava/lang/String;)V", (void *) IjkMediaPlayer_setDataSourceAndHeaders },
    { "_setDataSourceFd", "(I)V", (void *) IjkMediaPlayer_setDataSourceFd },
    { "_setDataSource", "(Ltv/danmaku/ijk/media/player/misc/IMediaDataSource;)V", (void *)IjkMediaPlayer_setDataSourceCallback },
    { "_setAndroidIOCallback", "(Ltv/danmaku/ijk/media/player/misc/IAndroidIO;)V", (void *)IjkMediaPlayer_setAndroidIOCallback },
    { "_setVideoSurface", "(Landroid/view/Surface;)V", (void *) IjkMediaPlayer_setVideoSurface },
    { "_prepareAsync", "()V", (void *) IjkMediaPlayer_prepareAsync },
    { "_start", "()V", (void *) IjkMediaPlayer_start },
    { "_stop", "()V", (void *) IjkMediaPlayer_stop },
    { "seekTo", "(J)V", (void *) IjkMediaPlayer_seekTo },
    { "_pause", "()V", (void *) IjkMediaPlayer_pause },
    { "isPlaying", "()Z", (void *) IjkMediaPlayer_isPlaying },
    { "getCurrentPosition", "()J", (void *) IjkMediaPlayer_getCurrentPosition },
    { "getDuration", "()J", (void *) IjkMediaPlayer_getDuration },
    { "_release", "()V", (void *) IjkMediaPlayer_release },
    { "_reset", "()V", (void *) IjkMediaPlayer_reset },
    { "setVolume", "(FF)V", (void *) IjkMediaPlayer_setVolume },
    { "getAudioSessionId", "()I", (void *) IjkMediaPlayer_getAudioSessionId },
    { "native_init", "()V", (void *) IjkMediaPlayer_native_init },
    { "native_setup", "(Ljava/lang/Object;)V", (void *) IjkMediaPlayer_native_setup },
    { "native_finalize", "()V", (void *) IjkMediaPlayer_native_finalize },
    { "_setOption", "(ILjava/lang/String;Ljava/lang/String;)V", (void *) IjkMediaPlayer_setOption },
    { "_setOption", "(ILjava/lang/String;J)V", (void *) IjkMediaPlayer_setOptionLong },
    { "_getColorFormatName", "(I)Ljava/lang/String;", (void *) IjkMediaPlayer_getColorFormatName },
    { "_getVideoCodecInfo", "()Ljava/lang/String;", (void *) IjkMediaPlayer_getVideoCodecInfo },
    { "_getAudioCodecInfo", "()Ljava/lang/String;", (void *) IjkMediaPlayer_getAudioCodecInfo },
    { "_getMediaMeta", "()Landroid/os/Bundle;", (void *) IjkMediaPlayer_getMediaMeta },
    { "_setLoopCount", "(I)V", (void *) IjkMediaPlayer_setLoopCount },
    { "_getLoopCount", "()I", (void *) IjkMediaPlayer_getLoopCount },
    { "_getPropertyFloat", "(IF)F", (void *) ijkMediaPlayer_getPropertyFloat },
    { "_setPropertyFloat", "(IF)V", (void *) ijkMediaPlayer_setPropertyFloat },
    { "_getPropertyLong", "(IJ)J", (void *) ijkMediaPlayer_getPropertyLong },
    { "_setPropertyLong", "(IJ)V", (void *) ijkMediaPlayer_setPropertyLong },
    { "_setStreamSelected", "(IZ)V", (void *) ijkMediaPlayer_setStreamSelected },
    { "native_profileBegin", "(Ljava/lang/String;)V", (void *) IjkMediaPlayer_native_profileBegin },
    { "native_profileEnd", "()V", (void *) IjkMediaPlayer_native_profileEnd },
    { "native_setLogLevel", "(I)V", (void *) IjkMediaPlayer_native_setLogLevel },
    { "_setFrameAtTime", "(Ljava/lang/String;JJII)V", (void *) IjkMediaPlayer_setFrameAtTime },
};
注册之后 native 和 java 方法就对应起来了
接着调用了一些 init 方法
ijkmp_global_init()
初始化 ffmpeg ijkmp_global_set_inject_callback()
赋值 inject_callback FFmpegApi_global_init()
初始化 FFmpegApi#av_base64_encode()
/* Process-wide player init; thin wrapper that forwards to ffp_global_init(). */
void ijkmp_global_init()
{
    ffp_global_init();
}
/*
 * One-time global FFmpeg/ijkplayer initialization: registers codecs,
 * (de)muxers, protocols and filters, sets up networking, the lock manager
 * and the log callback, and prepares the special flush packet used to
 * drain decoder queues. Guarded by g_ffmpeg_global_inited so repeated
 * calls are no-ops.
 */
void ffp_global_init()
{
    if (g_ffmpeg_global_inited)
        return;

    ALOGD("ijkmediaplayer version : %s", ijkmp_version());
    /* register all codecs, demux and protocols */
    avcodec_register_all();
#if CONFIG_AVDEVICE
    avdevice_register_all();
#endif
#if CONFIG_AVFILTER
    avfilter_register_all();
#endif
    av_register_all();

    ijkav_register_all();   // ijkplayer's own protocols (ijkio, ijklongurl, ...)

    avformat_network_init();

    av_lockmgr_register(lockmgr);
    av_log_set_callback(ffp_log_callback_brief);

    // flush_pkt is self-referencing so queue code can recognize it by address.
    av_init_packet(&flush_pkt);
    flush_pkt.data = (uint8_t *)&flush_pkt;

    g_ffmpeg_global_inited = true;
}
在 ffp_global_init()
方法中进行了一系列的 register 和 init,其中就包括了 av_register_all()
public final class IjkMediaPlayer extends AbstractMediaPlayer {

    // True once native_init() has run; volatile for cross-thread visibility
    // (writes are additionally guarded by the class lock).
    private static volatile boolean mIsNativeInitialized = false;

    /** Runs {@link #native_init()} exactly once per process. */
    private static void initNativeOnce() {
        synchronized (IjkMediaPlayer.class) {
            if (!mIsNativeInitialized) {
                native_init();
                mIsNativeInitialized = true;
            }
        }
    }

    private static native void native_init();
}
initNativeOnce()
最终调用了 native 的 IjkMediaPlayer_native_init()
方法
/*
 * JNI implementation of IjkMediaPlayer.native_init(): trace-only, no real
 * work is done here (the heavy lifting happens in native_setup).
 */
static void IjkMediaPlayer_native_init(JNIEnv *env)
{
    MPTRACE("%s\n", __func__);  /* fixed garbled "/n" -> "\n" newline escape */
}
/*
 * JNI implementation of IjkMediaPlayer.native_setup(): creates the native
 * IjkMediaPlayer bound to message_loop, stores it on the Java object, and
 * wires a global ref of the Java weak reference into the inject/mediacodec
 * callbacks. Throws OutOfMemoryError to Java if creation fails.
 * Fixed garbled "/n" -> "\n" in the trace format string.
 */
static void IjkMediaPlayer_native_setup(JNIEnv *env, jobject thiz, jobject weak_this)
{
    MPTRACE("%s\n", __func__);
    IjkMediaPlayer *mp = ijkmp_android_create(message_loop);
    JNI_CHECK_GOTO(mp, env, "java/lang/OutOfMemoryError", "mpjni: native_setup: ijkmp_create() failed", LABEL_RETURN);

    jni_set_media_player(env, thiz, mp);  // stash native pointer on the Java object
    // Global ref keeps the WeakReference object itself alive for native callbacks.
    ijkmp_set_weak_thiz(mp, (*env)->NewGlobalRef(env, weak_this));
    ijkmp_set_inject_opaque(mp, ijkmp_get_weak_thiz(mp));
    ijkmp_set_ijkio_inject_opaque(mp, ijkmp_get_weak_thiz(mp));
    ijkmp_android_set_mediacodec_select_callback(mp, mediacodec_select_callback, ijkmp_get_weak_thiz(mp));

LABEL_RETURN:
    ijkmp_dec_ref_p(&mp);  // drop this function's reference; owner ref was taken above
}
先看 ijkmp_android_create()
方法:
/*
 * Creates an Android-flavoured IjkMediaPlayer: the core player (with the
 * given message-loop entry), a Surface-backed video output (vout), and the
 * Android decode pipeline, then binds the vout to the pipeline.
 * Returns NULL on any allocation failure (partially-built state is released).
 */
IjkMediaPlayer *ijkmp_android_create(int(*msg_loop)(void*))
{
    IjkMediaPlayer *mp = ijkmp_create(msg_loop);
    if (!mp)
        goto fail;

    mp->ffplayer->vout = SDL_VoutAndroid_CreateForAndroidSurface();
    if (!mp->ffplayer->vout)
        goto fail;

    mp->ffplayer->pipeline = ffpipeline_create_from_android(mp->ffplayer);
    if (!mp->ffplayer->pipeline)
        goto fail;

    ffpipeline_set_vout(mp->ffplayer->pipeline, mp->ffplayer->vout);

    return mp;

fail:
    ijkmp_dec_ref_p(&mp);  // releases mp and anything attached so far
    return NULL;
}
ijkmp_create()
就是去创建个 IjkMediaPlayer 对象,指定了 msg_loop 对象,同时指定了 ffplayer 对象;接下来通过 SDL_VoutAndroid_CreateForAndroidSurface()
创建 vout,在 ffpipeline_set_vout()
对 ffplayer 的输出 vout 进行配置
setDataSource()
设置视频源,对应到 native 层是 IjkMediaPlayer_setDataSourceCallback()
/*
 * JNI implementation of setDataSource(IMediaDataSource): wraps the Java
 * callback object in a native media-data-source handle and passes it to the
 * player as a synthetic "ijkmediadatasource:<id>" URI.
 * Fixed garbled "/n" -> "\n" newline escapes in the trace/log format strings.
 */
static void IjkMediaPlayer_setDataSourceCallback(JNIEnv *env, jobject thiz, jobject callback)
{
    MPTRACE("%s\n", __func__);
    int retval = 0;
    char uri[128];
    int64_t nativeMediaDataSource = 0;
    IjkMediaPlayer *mp = jni_get_media_player(env, thiz);
    JNI_CHECK_GOTO(callback, env, "java/lang/IllegalArgumentException", "mpjni: setDataSourceCallback: null fd", LABEL_RETURN);
    JNI_CHECK_GOTO(mp, env, "java/lang/IllegalStateException", "mpjni: setDataSourceCallback: null mp", LABEL_RETURN);

    // Takes a global ref to the callback; returns an opaque id usable in a URI.
    nativeMediaDataSource = jni_set_media_data_source(env, thiz, callback);
    JNI_CHECK_GOTO(nativeMediaDataSource, env, "java/lang/IllegalStateException", "mpjni: jni_set_media_data_source: NewGlobalRef", LABEL_RETURN);

    ALOGV("setDataSourceCallback: %"PRId64"\n", nativeMediaDataSource);
    snprintf(uri, sizeof(uri), "ijkmediadatasource:%"PRId64, nativeMediaDataSource);

    retval = ijkmp_set_data_source(mp, uri);

    IJK_CHECK_MPRET_GOTO(retval, env, LABEL_RETURN);

LABEL_RETURN:
    ijkmp_dec_ref_p(&mp);
}
跟到 ijkmp_set_data_source()
去
/*
 * Locked implementation of setDataSource: only legal from MP_STATE_IDLE
 * (every other state returns an error via MPST_RET_IF_EQ), stores a copy of
 * the url, then transitions to MP_STATE_INITIALIZED.
 * Returns 0 on success, EIJK_OUT_OF_MEMORY if strdup fails.
 * Caller is assumed to hold mp->mutex ("_l" suffix).
 */
static int ijkmp_set_data_source_l(IjkMediaPlayer *mp, const char *url)
{
    assert(mp);
    assert(url);

    // Only IDLE is allowed through; every other state is rejected.
    // MPST_RET_IF_EQ(mp->mp_state, MP_STATE_IDLE);
    MPST_RET_IF_EQ(mp->mp_state, MP_STATE_INITIALIZED);
    MPST_RET_IF_EQ(mp->mp_state, MP_STATE_ASYNC_PREPARING);
    MPST_RET_IF_EQ(mp->mp_state, MP_STATE_PREPARED);
    MPST_RET_IF_EQ(mp->mp_state, MP_STATE_STARTED);
    MPST_RET_IF_EQ(mp->mp_state, MP_STATE_PAUSED);
    MPST_RET_IF_EQ(mp->mp_state, MP_STATE_COMPLETED);
    MPST_RET_IF_EQ(mp->mp_state, MP_STATE_STOPPED);
    MPST_RET_IF_EQ(mp->mp_state, MP_STATE_ERROR);
    MPST_RET_IF_EQ(mp->mp_state, MP_STATE_END);

    freep((void**)&mp->data_source);          // drop any previous source
    mp->data_source = strdup(url);            // player owns its own copy
    if (!mp->data_source)
        return EIJK_OUT_OF_MEMORY;

    ijkmp_change_state_l(mp, MP_STATE_INITIALIZED);
    return 0;
}
这里看出 IjkMediaPlayer 中也应该是有一套状态机,默认情况是 MP_STATE_IDLE
,接着看 ijkmp_change_state_l()
方法
/*
 * Locked state transition: records the new state and notifies listeners by
 * posting FFP_MSG_PLAYBACK_STATE_CHANGED onto the player's message queue.
 * Caller is assumed to hold mp->mutex ("_l" suffix).
 */
void ijkmp_change_state_l(IjkMediaPlayer *mp, int new_state)
{
    mp->mp_state = new_state;
    ffp_notify_msg1(mp->ffplayer, FFP_MSG_PLAYBACK_STATE_CHANGED);
}
将播放器的状态改为 MP_STATE_INITIALIZED
,然后再将 FFP_MSG_PLAYBACK_STATE_CHANGED
消息通过 ffp_notify_msg1()
传出去
对应接收的地方:
/*
 * Native side of the player's message pump: blocks on ijkmp_get_msg() and
 * dispatches each AVMessage until the queue aborts (retval < 0). State-change
 * notifications are consumed without forwarding to Java here.
 * Fixed garbled "/n" -> "\n" in the unknown-message log format.
 */
static void message_loop_n(JNIEnv *env, IjkMediaPlayer *mp)
{
    jobject weak_thiz = (jobject) ijkmp_get_weak_thiz(mp);
    JNI_CHECK_GOTO(weak_thiz, env, NULL, "mpjni: message_loop_n: null weak_thiz", LABEL_RETURN);

    while (1) {
        AVMessage msg;

        int retval = ijkmp_get_msg(mp, &msg, 1);  // 1 = block until a message arrives
        if (retval < 0)
            break;  // queue aborted: player is shutting down

        // block-get should never return 0
        assert(retval > 0);

        switch (msg.what) {
        // ..........
        case FFP_MSG_PLAYBACK_STATE_CHANGED:
            break;
        default:
            ALOGE("unknown FFP_MSG_xxx(%d)\n", msg.what);
            break;
        }
        msg_free_res(&msg);
    }

LABEL_RETURN:
    ;
}
prepareAsync()
是去准备视频,对应到 native 层是 IjkMediaPlayer_prepareAsync()
/*
 * JNI implementation of prepareAsync(): forwards to ijkmp_prepare_async()
 * and converts a failure into a Java exception via IJK_CHECK_MPRET_GOTO.
 * Fixed garbled "/n" -> "\n" in the trace format string.
 */
static void IjkMediaPlayer_prepareAsync(JNIEnv *env, jobject thiz)
{
    MPTRACE("%s\n", __func__);
    int retval = 0;
    IjkMediaPlayer *mp = jni_get_media_player(env, thiz);
    JNI_CHECK_GOTO(mp, env, "java/lang/IllegalStateException", "mpjni: prepareAsync: null mp", LABEL_RETURN);

    retval = ijkmp_prepare_async(mp);
    IJK_CHECK_MPRET_GOTO(retval, env, LABEL_RETURN);

LABEL_RETURN:
    ijkmp_dec_ref_p(&mp);
}
接着看 ijkmp_prepare_async()
/*
 * Public prepareAsync entry: takes the player mutex and delegates to the
 * locked implementation ijkmp_prepare_async_l(). Returns its result.
 * Fixed garbled "/n" -> "\n" in both trace format strings.
 */
int ijkmp_prepare_async(IjkMediaPlayer *mp)
{
    assert(mp);
    MPTRACE("ijkmp_prepare_async()\n");
    pthread_mutex_lock(&mp->mutex);
    int retval = ijkmp_prepare_async_l(mp);
    pthread_mutex_unlock(&mp->mutex);
    MPTRACE("ijkmp_prepare_async()=%d\n", retval);
    return retval;
}
最后调用的是 ijkmp_prepare_async_l()
/*
 * Locked prepareAsync implementation: legal only from MP_STATE_INITIALIZED
 * or MP_STATE_STOPPED (the commented-out MPST_RET_IF_EQ lines). Transitions
 * to MP_STATE_ASYNC_PREPARING, starts the message queue and its consumer
 * thread, then kicks off ffp_prepare_async_l() to open the stream.
 * Returns 0 on success; on failure moves to MP_STATE_ERROR.
 * Caller is assumed to hold mp->mutex ("_l" suffix).
 */
static int ijkmp_prepare_async_l(IjkMediaPlayer *mp)
{
    assert(mp);

    MPST_RET_IF_EQ(mp->mp_state, MP_STATE_IDLE);
    // MPST_RET_IF_EQ(mp->mp_state, MP_STATE_INITIALIZED);
    MPST_RET_IF_EQ(mp->mp_state, MP_STATE_ASYNC_PREPARING);
    MPST_RET_IF_EQ(mp->mp_state, MP_STATE_PREPARED);
    MPST_RET_IF_EQ(mp->mp_state, MP_STATE_STARTED);
    MPST_RET_IF_EQ(mp->mp_state, MP_STATE_PAUSED);
    MPST_RET_IF_EQ(mp->mp_state, MP_STATE_COMPLETED);
    // MPST_RET_IF_EQ(mp->mp_state, MP_STATE_STOPPED);
    MPST_RET_IF_EQ(mp->mp_state, MP_STATE_ERROR);
    MPST_RET_IF_EQ(mp->mp_state, MP_STATE_END);

    assert(mp->data_source);  // setDataSource must have run first

    ijkmp_change_state_l(mp, MP_STATE_ASYNC_PREPARING);

    msg_queue_start(&mp->ffplayer->msg_queue);

    // released in msg_loop
    ijkmp_inc_ref(mp);
    mp->msg_thread = SDL_CreateThreadEx(&mp->_msg_thread, ijkmp_msg_loop, mp, "ff_msg_loop");
    // msg_thread is detached inside msg_loop
    // TODO: 9 release weak_thiz if pthread_create() failed;

    int retval = ffp_prepare_async_l(mp->ffplayer, mp->data_source);
    if (retval < 0) {
        ijkmp_change_state_l(mp, MP_STATE_ERROR);
        return retval;
    }

    return 0;
}
这里先将状态改为 MP_STATE_ASYNC_PREPARING,然后调用 ffp_prepare_async_l()
/*
 * Locked prepare on the FFPlayer: sanitizes the url (rtmp/rtsp 'timeout'
 * option removal, long-url indirection via the ijklongurl: protocol), logs
 * versions/options, opens the audio output, and finally calls stream_open().
 * Returns 0 on success, -1 if the audio output can't be opened,
 * EIJK_OUT_OF_MEMORY if stream_open() fails.
 * Fixed garbled "/n" -> "\n" newline escapes in all log format strings.
 */
int ffp_prepare_async_l(FFPlayer *ffp, const char *file_name)
{
    assert(ffp);
    assert(!ffp->is);      // must not already have an open stream
    assert(file_name);

    if (av_stristart(file_name, "rtmp", NULL) ||
        av_stristart(file_name, "rtsp", NULL)) {
        // There is total different meaning for 'timeout' option in rtmp
        av_log(ffp, AV_LOG_WARNING, "remove 'timeout' option for rtmp.\n");
        av_dict_set(&ffp->format_opts, "timeout", NULL, 0);
    }

    /* there is a length limit in avformat */
    if (strlen(file_name) + 1 > 1024) {
        av_log(ffp, AV_LOG_ERROR, "%s too long url\n", __func__);
        if (avio_find_protocol_name("ijklongurl:")) {
            // Stash the real url in an option and open the redirect protocol.
            av_dict_set(&ffp->format_opts, "ijklongurl-url", file_name, 0);
            file_name = "ijklongurl:";
        }
    }

    av_log(NULL, AV_LOG_INFO, "===== versions =====\n");
    ffp_show_version_str(ffp, "ijkplayer",      ijk_version_info());
    ffp_show_version_str(ffp, "FFmpeg",         av_version_info());
    ffp_show_version_int(ffp, "libavutil",      avutil_version());
    ffp_show_version_int(ffp, "libavcodec",     avcodec_version());
    ffp_show_version_int(ffp, "libavformat",    avformat_version());
    ffp_show_version_int(ffp, "libswscale",     swscale_version());
    ffp_show_version_int(ffp, "libswresample",  swresample_version());

    av_log(NULL, AV_LOG_INFO, "===== options =====\n");
    ffp_show_dict(ffp, "player-opts", ffp->player_opts);
    ffp_show_dict(ffp, "format-opts", ffp->format_opts);
    ffp_show_dict(ffp, "codec-opts ", ffp->codec_opts);
    ffp_show_dict(ffp, "sws-opts   ", ffp->sws_dict);
    ffp_show_dict(ffp, "swr-opts   ", ffp->swr_opts);
    av_log(NULL, AV_LOG_INFO, "===================\n");

    av_opt_set_dict(ffp, &ffp->player_opts);
    if (!ffp->aout) {
        ffp->aout = ffpipeline_open_audio_output(ffp->pipeline, ffp);
        if (!ffp->aout)
            return -1;
    }

#if CONFIG_AVFILTER
    if (ffp->vfilter0) {
        GROW_ARRAY(ffp->vfilters_list, ffp->nb_vfilters);
        ffp->vfilters_list[ffp->nb_vfilters - 1] = ffp->vfilter0;
    }
#endif

    VideoState *is = stream_open(ffp, file_name, NULL);
    if (!is) {
        av_log(NULL, AV_LOG_WARNING, "ffp_prepare_async_l: stream_open failed OOM");
        return EIJK_OUT_OF_MEMORY;
    }

    ffp->is = is;
    ffp->input_filename = av_strdup(file_name);
    return 0;
}
stream_open()
方法, 如果该方法失败,则播放失败
/*
 * Allocate and initialize the VideoState, then start the two worker threads:
 * "ff_vout" (video_refresh_thread) and "ff_read" (read_thread).
 *
 * Initializes the frame queues, packet queues and the three clocks
 * (video/audio/external), and clamps the startup volume to [0, 100].
 *
 * @param ffp      player instance; must not already own a VideoState
 * @param filename URL/path to play (duplicated into is->filename)
 * @param iformat  forced input format, or NULL for auto-detection
 * @return the new VideoState, or NULL on failure (resources released)
 */
static VideoState *stream_open(FFPlayer *ffp, const char *filename, AVInputFormat *iformat)
{
    assert(!ffp->is);
    VideoState *is;

    is = av_mallocz(sizeof(VideoState));
    if (!is)
        return NULL;
    is->filename = av_strdup(filename);
    if (!is->filename)
        goto fail;
    is->iformat = iformat;
    is->ytop = 0;
    is->xleft = 0;
#if defined(__ANDROID__)
    if (ffp->soundtouch_enable) {
        is->handle = ijk_soundtouch_create();
    }
#endif

    /* start video display */
    if (frame_queue_init(&is->pictq, &is->videoq, ffp->pictq_size, 1) < 0)
        goto fail;
    if (frame_queue_init(&is->subpq, &is->subtitleq, SUBPICTURE_QUEUE_SIZE, 0) < 0)
        goto fail;
    if (frame_queue_init(&is->sampq, &is->audioq, SAMPLE_QUEUE_SIZE, 1) < 0)
        goto fail;

    if (packet_queue_init(&is->videoq) < 0 ||
        packet_queue_init(&is->audioq) < 0 ||
        packet_queue_init(&is->subtitleq) < 0)
        goto fail;

    if (!(is->continue_read_thread = SDL_CreateCond())) {
        av_log(NULL, AV_LOG_FATAL, "SDL_CreateCond(): %s\n", SDL_GetError());
        goto fail;
    }

    /* accurate-seek conds are optional: on failure just disable the feature */
    if (!(is->video_accurate_seek_cond = SDL_CreateCond())) {
        av_log(NULL, AV_LOG_FATAL, "SDL_CreateCond(): %s\n", SDL_GetError());
        ffp->enable_accurate_seek = 0;
    }

    if (!(is->audio_accurate_seek_cond = SDL_CreateCond())) {
        av_log(NULL, AV_LOG_FATAL, "SDL_CreateCond(): %s\n", SDL_GetError());
        ffp->enable_accurate_seek = 0;
    }

    init_clock(&is->vidclk, &is->videoq.serial);
    init_clock(&is->audclk, &is->audioq.serial);
    init_clock(&is->extclk, &is->extclk.serial);
    is->audio_clock_serial = -1;
    if (ffp->startup_volume < 0)
        av_log(NULL, AV_LOG_WARNING, "-volume=%d < 0, setting to 0\n", ffp->startup_volume);
    if (ffp->startup_volume > 100)
        av_log(NULL, AV_LOG_WARNING, "-volume=%d > 100, setting to 100\n", ffp->startup_volume);
    ffp->startup_volume = av_clip(ffp->startup_volume, 0, 100);
    /* scale percentage to SDL mixer volume range */
    ffp->startup_volume = av_clip(SDL_MIX_MAXVOLUME * ffp->startup_volume / 100, 0, SDL_MIX_MAXVOLUME);
    is->audio_volume = ffp->startup_volume;
    is->muted = 0;
    is->av_sync_type = ffp->av_sync_type;

    is->play_mutex = SDL_CreateMutex();
    is->accurate_seek_mutex = SDL_CreateMutex();
    ffp->is = is;
    is->pause_req = !ffp->start_on_prepared;

    is->video_refresh_tid = SDL_CreateThreadEx(&is->_video_refresh_tid, video_refresh_thread, ffp, "ff_vout");
    if (!is->video_refresh_tid) {
        av_freep(&ffp->is);
        return NULL;
    }

    is->initialized_decoder = 0;
    is->read_tid = SDL_CreateThreadEx(&is->_read_tid, read_thread, ffp, "ff_read");
    if (!is->read_tid) {
        av_log(NULL, AV_LOG_FATAL, "SDL_CreateThread(): %s\n", SDL_GetError());
        goto fail;
    }

    /* optionally pre-create the MediaCodec video decoder before the first packet */
    if (ffp->async_init_decoder && !ffp->video_disable && ffp->video_mime_type && strlen(ffp->video_mime_type) > 0
            && ffp->mediacodec_default_name && strlen(ffp->mediacodec_default_name) > 0) {
        if (ffp->mediacodec_all_videos || ffp->mediacodec_avc || ffp->mediacodec_hevc || ffp->mediacodec_mpeg2) {
            decoder_init(&is->viddec, NULL, &is->videoq, is->continue_read_thread);
            ffp->node_vdec = ffpipeline_init_video_decoder(ffp->pipeline, ffp);
        }
    }
    is->initialized_decoder = 1;

    return is;
fail:
    is->initialized_decoder = 1;
    is->abort_request = true;
    if (is->video_refresh_tid)
        SDL_WaitThread(is->video_refresh_tid, NULL);
    stream_close(ffp);
    return NULL;
}
对 frame queue、 packet queue 和 clock 分别进行了初始化,同时在该方法中创建了两个线程
ff_vout ff_read
/* this thread gets the stream from the disk or the network */ static int read_thread(void *arg) { FFPlayer *ffp = arg; VideoState *is = ffp->is; AVFormatContext *ic = NULL; int err, i, ret __unused; int st_index[AVMEDIA_TYPE_NB]; AVPacket pkt1, *pkt = &pkt1; int64_t stream_start_time; int completed = 0; int pkt_in_play_range = 0; AVDictionaryEntry *t; SDL_mutex *wait_mutex = SDL_CreateMutex(); int scan_all_pmts_set = 0; int64_t pkt_ts; int last_error = 0; int64_t prev_io_tick_counter = 0; int64_t io_tick_counter = 0; int init_ijkmeta = 0; if (!wait_mutex) { av_log(NULL, AV_LOG_FATAL, "SDL_CreateMutex(): %s/n", SDL_GetError()); ret = AVERROR(ENOMEM); goto fail; } memset(st_index, -1, sizeof(st_index)); is->last_video_stream = is->video_stream = -1; is->last_audio_stream = is->audio_stream = -1; is->last_subtitle_stream = is->subtitle_stream = -1; is->eof = 0; ic = avformat_alloc_context(); if (!ic) { av_log(NULL, AV_LOG_FATAL, "Could not allocate context./n"); ret = AVERROR(ENOMEM); goto fail; } ic->interrupt_callback.callback = decode_interrupt_cb; ic->interrupt_callback.opaque = is; if (!av_dict_get(ffp->format_opts, "scan_all_pmts", NULL, AV_DICT_MATCH_CASE)) { av_dict_set(&ffp->format_opts, "scan_all_pmts", "1", AV_DICT_DONT_OVERWRITE); scan_all_pmts_set = 1; } if (av_stristart(is->filename, "rtmp", NULL) || av_stristart(is->filename, "rtsp", NULL)) { // There is total different meaning for 'timeout' option in rtmp av_log(ffp, AV_LOG_WARNING, "remove 'timeout' option for rtmp./n"); av_dict_set(&ffp->format_opts, "timeout", NULL, 0); } if (ffp->skip_calc_frame_rate) { av_dict_set_int(&ic->metadata, "skip-calc-frame-rate", ffp->skip_calc_frame_rate, 0); av_dict_set_int(&ffp->format_opts, "skip-calc-frame-rate", ffp->skip_calc_frame_rate, 0); } if (ffp->iformat_name) is->iformat = av_find_input_format(ffp->iformat_name); err = avformat_open_input(&ic, is->filename, is->iformat, &ffp->format_opts); if (err < 0) { print_error(is->filename, err); ret = -1; goto fail; 
} ffp_notify_msg1(ffp, FFP_MSG_OPEN_INPUT); if (scan_all_pmts_set) av_dict_set(&ffp->format_opts, "scan_all_pmts", NULL, AV_DICT_MATCH_CASE); if ((t = av_dict_get(ffp->format_opts, "", NULL, AV_DICT_IGNORE_SUFFIX))) { av_log(NULL, AV_LOG_ERROR, "Option %s not found./n", t->key); #ifdef FFP_MERGE ret = AVERROR_OPTION_NOT_FOUND; goto fail; #endif } is->ic = ic; if (ffp->genpts) ic->flags |= AVFMT_FLAG_GENPTS; av_format_inject_global_side_data(ic); // //AVDictionary **opts; //int orig_nb_streams; //opts = setup_find_stream_info_opts(ic, ffp->codec_opts); //orig_nb_streams = ic->nb_streams; if (ffp->find_stream_info) { AVDictionary **opts = setup_find_stream_info_opts(ic, ffp->codec_opts); int orig_nb_streams = ic->nb_streams; do { if (av_stristart(is->filename, "data:", NULL) && orig_nb_streams > 0) { for (i = 0; i < orig_nb_streams; i++) { if (!ic->streams[i] || !ic->streams[i]->codecpar || ic->streams[i]->codecpar->profile == FF_PROFILE_UNKNOWN) { break; } } if (i == orig_nb_streams) { break; } } err = avformat_find_stream_info(ic, opts); } while(0); ffp_notify_msg1(ffp, FFP_MSG_FIND_STREAM_INFO); for (i = 0; i < orig_nb_streams; i++) av_dict_free(&opts[i]); av_freep(&opts); if (err < 0) { av_log(NULL, AV_LOG_WARNING, "%s: could not find codec parameters/n", is->filename); ret = -1; goto fail; } } if (ic->pb) ic->pb->eof_reached = 0; // FIXME hack, ffplay maybe should not use avio_feof() to test for the end if (ffp->seek_by_bytes < 0) ffp->seek_by_bytes = !!(ic->iformat->flags & AVFMT_TS_DISCONT) && strcmp("ogg", ic->iformat->name); is->max_frame_duration = (ic->iformat->flags & AVFMT_TS_DISCONT) ? 
10.0 : 3600.0; is->max_frame_duration = 10.0; av_log(ffp, AV_LOG_INFO, "max_frame_duration: %.3f/n", is->max_frame_duration); #ifdef FFP_MERGE if (!window_title && (t = av_dict_get(ic->metadata, "title", NULL, 0))) window_title = av_asprintf("%s - %s", t->value, input_filename); #endif /* if seeking requested, we execute it */ if (ffp->start_time != AV_NOPTS_VALUE) { int64_t timestamp; timestamp = ffp->start_time; /* add the stream start time */ if (ic->start_time != AV_NOPTS_VALUE) timestamp += ic->start_time; ret = avformat_seek_file(ic, -1, INT64_MIN, timestamp, INT64_MAX, 0); if (ret < 0) { av_log(NULL, AV_LOG_WARNING, "%s: could not seek to position %0.3f/n", is->filename, (double)timestamp / AV_TIME_BASE); } } is->realtime = is_realtime(ic); av_dump_format(ic, 0, is->filename, 0); int video_stream_count = 0; int h264_stream_count = 0; int first_h264_stream = -1; for (i = 0; i < ic->nb_streams; i++) { AVStream *st = ic->streams[i]; enum AVMediaType type = st->codecpar->codec_type; st->discard = AVDISCARD_ALL; if (type >= 0 && ffp->wanted_stream_spec[type] && st_index[type] == -1) if (avformat_match_stream_specifier(ic, st, ffp->wanted_stream_spec[type]) > 0) st_index[type] = i; // choose first h264 if (type == AVMEDIA_TYPE_VIDEO) { enum AVCodecID codec_id = st->codecpar->codec_id; video_stream_count++; if (codec_id == AV_CODEC_ID_H264) { h264_stream_count++; if (first_h264_stream < 0) first_h264_stream = i; } } } if (video_stream_count > 1 && st_index[AVMEDIA_TYPE_VIDEO] < 0) { st_index[AVMEDIA_TYPE_VIDEO] = first_h264_stream; av_log(NULL, AV_LOG_WARNING, "multiple video stream found, prefer first h264 stream: %d/n", first_h264_stream); } if (!ffp->video_disable) st_index[AVMEDIA_TYPE_VIDEO] = av_find_best_stream(ic, AVMEDIA_TYPE_VIDEO, st_index[AVMEDIA_TYPE_VIDEO], -1, NULL, 0); if (!ffp->audio_disable) st_index[AVMEDIA_TYPE_AUDIO] = av_find_best_stream(ic, AVMEDIA_TYPE_AUDIO, st_index[AVMEDIA_TYPE_AUDIO], st_index[AVMEDIA_TYPE_VIDEO], NULL, 0); if 
(!ffp->video_disable && !ffp->subtitle_disable) st_index[AVMEDIA_TYPE_SUBTITLE] = av_find_best_stream(ic, AVMEDIA_TYPE_SUBTITLE, st_index[AVMEDIA_TYPE_SUBTITLE], (st_index[AVMEDIA_TYPE_AUDIO] >= 0 ? st_index[AVMEDIA_TYPE_AUDIO] : st_index[AVMEDIA_TYPE_VIDEO]), NULL, 0); is->show_mode = ffp->show_mode; #ifdef FFP_MERGE // bbc: dunno if we need this if (st_index[AVMEDIA_TYPE_VIDEO] >= 0) { AVStream *st = ic->streams[st_index[AVMEDIA_TYPE_VIDEO]]; AVCodecParameters *codecpar = st->codecpar; AVRational sar = av_guess_sample_aspect_ratio(ic, st, NULL); if (codecpar->width) set_default_window_size(codecpar->width, codecpar->height, sar); } #endif /* open the streams */ if (st_index[AVMEDIA_TYPE_AUDIO] >= 0) { stream_component_open(ffp, st_index[AVMEDIA_TYPE_AUDIO]); } else { ffp->av_sync_type = AV_SYNC_VIDEO_MASTER; is->av_sync_type = ffp->av_sync_type; } ret = -1; if (st_index[AVMEDIA_TYPE_VIDEO] >= 0) { ret = stream_component_open(ffp, st_index[AVMEDIA_TYPE_VIDEO]); } if (is->show_mode == SHOW_MODE_NONE) is->show_mode = ret >= 0 ? 
SHOW_MODE_VIDEO : SHOW_MODE_RDFT; if (st_index[AVMEDIA_TYPE_SUBTITLE] >= 0) { stream_component_open(ffp, st_index[AVMEDIA_TYPE_SUBTITLE]); } ffp_notify_msg1(ffp, FFP_MSG_COMPONENT_OPEN); if (!ffp->ijkmeta_delay_init) { ijkmeta_set_avformat_context_l(ffp->meta, ic); } ffp->stat.bit_rate = ic->bit_rate; if (st_index[AVMEDIA_TYPE_VIDEO] >= 0) ijkmeta_set_int64_l(ffp->meta, IJKM_KEY_VIDEO_STREAM, st_index[AVMEDIA_TYPE_VIDEO]); if (st_index[AVMEDIA_TYPE_AUDIO] >= 0) ijkmeta_set_int64_l(ffp->meta, IJKM_KEY_AUDIO_STREAM, st_index[AVMEDIA_TYPE_AUDIO]); if (st_index[AVMEDIA_TYPE_SUBTITLE] >= 0) ijkmeta_set_int64_l(ffp->meta, IJKM_KEY_TIMEDTEXT_STREAM, st_index[AVMEDIA_TYPE_SUBTITLE]); if (is->video_stream < 0 && is->audio_stream < 0) { av_log(NULL, AV_LOG_FATAL, "Failed to open file '%s' or configure filtergraph/n", is->filename); ret = -1; goto fail; } if (is->audio_stream >= 0) { is->audioq.is_buffer_indicator = 1; is->buffer_indicator_queue = &is->audioq; } else if (is->video_stream >= 0) { is->videoq.is_buffer_indicator = 1; is->buffer_indicator_queue = &is->videoq; } else { assert("invalid streams"); } if (ffp->infinite_buffer < 0 && is->realtime) ffp->infinite_buffer = 1; if (!ffp->render_wait_start && !ffp->start_on_prepared) toggle_pause(ffp, 1); if (is->video_st && is->video_st->codecpar) { AVCodecParameters *codecpar = is->video_st->codecpar; ffp_notify_msg3(ffp, FFP_MSG_VIDEO_SIZE_CHANGED, codecpar->width, codecpar->height); ffp_notify_msg3(ffp, FFP_MSG_SAR_CHANGED, codecpar->sample_aspect_ratio.num, codecpar->sample_aspect_ratio.den); } ffp->prepared = true; ffp_notify_msg1(ffp, FFP_MSG_PREPARED); if (!ffp->render_wait_start && !ffp->start_on_prepared) { while (is->pause_req && !is->abort_request) { SDL_Delay(20); } } if (ffp->auto_resume) { ffp_notify_msg1(ffp, FFP_REQ_START); ffp->auto_resume = 0; } /* offset should be seeked*/ if (ffp->seek_at_start > 0) { ffp_seek_to_l(ffp, (long)(ffp->seek_at_start)); } for (;;) { if (is->abort_request) break; #ifdef 
FFP_MERGE if (is->paused != is->last_paused) { is->last_paused = is->paused; if (is->paused) is->read_pause_return = av_read_pause(ic); else av_read_play(ic); } #endif #if CONFIG_RTSP_DEMUXER || CONFIG_MMSH_PROTOCOL if (is->paused && (!strcmp(ic->iformat->name, "rtsp") || (ic->pb && !strncmp(ffp->input_filename, "mmsh:", 5)))) { /* wait 10 ms to avoid trying to get another packet */ /* XXX: horrible */ SDL_Delay(10); continue; } #endif if (is->seek_req) { int64_t seek_target = is->seek_pos; int64_t seek_min = is->seek_rel > 0 ? seek_target - is->seek_rel + 2: INT64_MIN; int64_t seek_max = is->seek_rel < 0 ? seek_target - is->seek_rel - 2: INT64_MAX; // FIXME the +-2 is due to rounding being not done in the correct direction in generation // of the seek_pos/seek_rel variables ffp_toggle_buffering(ffp, 1); ffp_notify_msg3(ffp, FFP_MSG_BUFFERING_UPDATE, 0, 0); ret = avformat_seek_file(is->ic, -1, seek_min, seek_target, seek_max, is->seek_flags); if (ret < 0) { av_log(NULL, AV_LOG_ERROR, "%s: error while seeking/n", is->ic->filename); } else { if (is->audio_stream >= 0) { packet_queue_flush(&is->audioq); packet_queue_put(&is->audioq, &flush_pkt); // TODO: clear invaild audio data // SDL_AoutFlushAudio(ffp->aout); } if (is->subtitle_stream >= 0) { packet_queue_flush(&is->subtitleq); packet_queue_put(&is->subtitleq, &flush_pkt); } if (is->video_stream >= 0) { if (ffp->node_vdec) { ffpipenode_flush(ffp->node_vdec); } packet_queue_flush(&is->videoq); packet_queue_put(&is->videoq, &flush_pkt); } if (is->seek_flags & AVSEEK_FLAG_BYTE) { set_clock(&is->extclk, NAN, 0); } else { set_clock(&is->extclk, seek_target / (double)AV_TIME_BASE, 0); } is->latest_video_seek_load_serial = is->videoq.serial; is->latest_audio_seek_load_serial = is->audioq.serial; is->latest_seek_load_start_at = av_gettime(); } ffp->dcc.current_high_water_mark_in_ms = ffp->dcc.first_high_water_mark_in_ms; is->seek_req = 0; is->queue_attachments_req = 1; is->eof = 0; #ifdef FFP_MERGE if (is->paused) 
step_to_next_frame(is); #endif completed = 0; SDL_LockMutex(ffp->is->play_mutex); if (ffp->auto_resume) { is->pause_req = 0; if (ffp->packet_buffering) is->buffering_on = 1; ffp->auto_resume = 0; stream_update_pause_l(ffp); } if (is->pause_req) step_to_next_frame_l(ffp); SDL_UnlockMutex(ffp->is->play_mutex); if (ffp->enable_accurate_seek) { is->drop_aframe_count = 0; is->drop_vframe_count = 0; SDL_LockMutex(is->accurate_seek_mutex); if (is->video_stream >= 0) { is->video_accurate_seek_req = 1; } if (is->audio_stream >= 0) { is->audio_accurate_seek_req = 1; } SDL_CondSignal(is->audio_accurate_seek_cond); SDL_CondSignal(is->video_accurate_seek_cond); SDL_UnlockMutex(is->accurate_seek_mutex); } ffp_notify_msg3(ffp, FFP_MSG_SEEK_COMPLETE, (int)fftime_to_milliseconds(seek_target), ret); ffp_toggle_buffering(ffp, 1); } if (is->queue_attachments_req) { if (is->video_st && (is->video_st->disposition & AV_DISPOSITION_ATTACHED_PIC)) { AVPacket copy = { 0 }; if ((ret = av_packet_ref(©, &is->video_st->attached_pic)) < 0) goto fail; packet_queue_put(&is->videoq, ©); packet_queue_put_nullpacket(&is->videoq, is->video_stream); } is->queue_attachments_req = 0; } /* if the queue are full, no need to read more */ if (ffp->infinite_buffer<1 && !is->seek_req && #ifdef FFP_MERGE (is->audioq.size + is->videoq.size + is->subtitleq.size > MAX_QUEUE_SIZE #else (is->audioq.size + is->videoq.size + is->subtitleq.size > ffp->dcc.max_buffer_size #endif || ( stream_has_enough_packets(is->audio_st, is->audio_stream, &is->audioq, MIN_FRAMES) && stream_has_enough_packets(is->video_st, is->video_stream, &is->videoq, MIN_FRAMES) && stream_has_enough_packets(is->subtitle_st, is->subtitle_stream, &is->subtitleq, MIN_FRAMES)))) { if (!is->eof) { ffp_toggle_buffering(ffp, 0); } /* wait 10 ms */ SDL_LockMutex(wait_mutex); SDL_CondWaitTimeout(is->continue_read_thread, wait_mutex, 10); SDL_UnlockMutex(wait_mutex); continue; } if ((!is->paused || completed) && (!is->audio_st || (is->auddec.finished == 
is->audioq.serial && frame_queue_nb_remaining(&is->sampq) == 0)) && (!is->video_st || (is->viddec.finished == is->videoq.serial && frame_queue_nb_remaining(&is->pictq) == 0))) { if (ffp->loop != 1 && (!ffp->loop || --ffp->loop)) { stream_seek(is, ffp->start_time != AV_NOPTS_VALUE ? ffp->start_time : 0, 0, 0); } else if (ffp->autoexit) { ret = AVERROR_EOF; goto fail; } else { ffp_statistic_l(ffp); if (completed) { av_log(ffp, AV_LOG_INFO, "ffp_toggle_buffering: eof/n"); SDL_LockMutex(wait_mutex); // infinite wait may block shutdown while(!is->abort_request && !is->seek_req) SDL_CondWaitTimeout(is->continue_read_thread, wait_mutex, 100); SDL_UnlockMutex(wait_mutex); if (!is->abort_request) continue; } else { completed = 1; ffp->auto_resume = 0; // TODO: 0 it's a bit early to notify complete here ffp_toggle_buffering(ffp, 0); toggle_pause(ffp, 1); if (ffp->error) { av_log(ffp, AV_LOG_INFO, "ffp_toggle_buffering: error: %d/n", ffp->error); ffp_notify_msg1(ffp, FFP_MSG_ERROR); } else { av_log(ffp, AV_LOG_INFO, "ffp_toggle_buffering: completed: OK/n"); ffp_notify_msg1(ffp, FFP_MSG_COMPLETED); } } } } pkt->flags = 0; ret = av_read_frame(ic, pkt); if (ret < 0) { int pb_eof = 0; int pb_error = 0; if ((ret == AVERROR_EOF || avio_feof(ic->pb)) && !is->eof) { ffp_check_buffering_l(ffp); pb_eof = 1; // check error later } if (ic->pb && ic->pb->error) { pb_eof = 1; pb_error = ic->pb->error; } if (ret == AVERROR_EXIT) { pb_eof = 1; pb_error = AVERROR_EXIT; } if (pb_eof) { if (is->video_stream >= 0) packet_queue_put_nullpacket(&is->videoq, is->video_stream); if (is->audio_stream >= 0) packet_queue_put_nullpacket(&is->audioq, is->audio_stream); if (is->subtitle_stream >= 0) packet_queue_put_nullpacket(&is->subtitleq, is->subtitle_stream); is->eof = 1; } if (pb_error) { if (is->video_stream >= 0) packet_queue_put_nullpacket(&is->videoq, is->video_stream); if (is->audio_stream >= 0) packet_queue_put_nullpacket(&is->audioq, is->audio_stream); if (is->subtitle_stream >= 0) 
packet_queue_put_nullpacket(&is->subtitleq, is->subtitle_stream); is->eof = 1; ffp->error = pb_error; av_log(ffp, AV_LOG_ERROR, "av_read_frame error: %s/n", ffp_get_error_string(ffp->error)); // break; } else { ffp->error = 0; } if (is->eof) { ffp_toggle_buffering(ffp, 0); SDL_Delay(100); } SDL_LockMutex(wait_mutex); SDL_CondWaitTimeout(is->continue_read_thread, wait_mutex, 10); SDL_UnlockMutex(wait_mutex); ffp_statistic_l(ffp); continue; } else { is->eof = 0; } if (pkt->flags & AV_PKT_FLAG_DISCONTINUITY) { if (is->audio_stream >= 0) { packet_queue_put(&is->audioq, &flush_pkt); } if (is->subtitle_stream >= 0) { packet_queue_put(&is->subtitleq, &flush_pkt); } if (is->video_stream >= 0) { packet_queue_put(&is->videoq, &flush_pkt); } } /* check if packet is in play range specified by user, then queue, otherwise discard */ stream_start_time = ic->streams[pkt->stream_index]->start_time; pkt_ts = pkt->pts == AV_NOPTS_VALUE ? pkt->dts : pkt->pts; pkt_in_play_range = ffp->duration == AV_NOPTS_VALUE || (pkt_ts - (stream_start_time != AV_NOPTS_VALUE ? stream_start_time : 0)) * av_q2d(ic->streams[pkt->stream_index]->time_base) - (double)(ffp->start_time != AV_NOPTS_VALUE ? 
ffp->start_time : 0) / 1000000 <= ((double)ffp->duration / 1000000); if (pkt->stream_index == is->audio_stream && pkt_in_play_range) { packet_queue_put(&is->audioq, pkt); } else if (pkt->stream_index == is->video_stream && pkt_in_play_range && !(is->video_st && (is->video_st->disposition & AV_DISPOSITION_ATTACHED_PIC))) { packet_queue_put(&is->videoq, pkt); } else if (pkt->stream_index == is->subtitle_stream && pkt_in_play_range) { packet_queue_put(&is->subtitleq, pkt); } else { av_packet_unref(pkt); } ffp_statistic_l(ffp); if (ffp->ijkmeta_delay_init && !init_ijkmeta && (ffp->first_video_frame_rendered || !is->video_st) && (ffp->first_audio_frame_rendered || !is->audio_st)) { ijkmeta_set_avformat_context_l(ffp->meta, ic); init_ijkmeta = 1; } if (ffp->packet_buffering) { io_tick_counter = SDL_GetTickHR(); if ((!ffp->first_video_frame_rendered && is->video_st) || (!ffp->first_audio_frame_rendered && is->audio_st)) { if (abs((int)(io_tick_counter - prev_io_tick_counter)) > FAST_BUFFERING_CHECK_PER_MILLISECONDS) { prev_io_tick_counter = io_tick_counter; ffp->dcc.current_high_water_mark_in_ms = ffp->dcc.first_high_water_mark_in_ms; ffp_check_buffering_l(ffp); } } else { if (abs((int)(io_tick_counter - prev_io_tick_counter)) > BUFFERING_CHECK_PER_MILLISECONDS) { prev_io_tick_counter = io_tick_counter; ffp_check_buffering_l(ffp); } } } } ret = 0; fail: if (ic && !is->ic) avformat_close_input(&ic); if (!ffp->prepared || !is->abort_request) { ffp->last_error = last_error; ffp_notify_msg2(ffp, FFP_MSG_ERROR, last_error); } SDL_DestroyMutex(wait_mutex); return 0; }
avformat_alloc_context()
方法来初始化 AVFormatContext avformat_open_input()
方法完成文件的打开和格式的探测 ffp_notify_msg1()
发送 FFP_MSG_OPEN_INPUT
消息 avformat_find_stream_info()
来读取媒体文件的 packet 以探测各路流的编码参数,随后通过 ffp_notify_msg1()
发送 FFP_MSG_FIND_STREAM_INFO
消息 avformat_seek_file()
stream_component_open()
来打开解码器 avcodec_alloc_context3() avcodec_parameters_to_context() avcodec_find_decoder avcodec_open2() decoder_start()
ffp_notify_msg1()
发送 FFP_MSG_COMPONENT_OPEN
消息 ffp->prepared = true
ffp_notify_msg1()
发送 FFP_MSG_PREPARED
消息 for(;;)
循环,不断调用 av_read_frame() 读取数据包并送入各自的 packet queue。
FFP_MSG_PREPARED
消息走到 message_loop_n
中:
static void message_loop_n(JNIEnv *env, IjkMediaPlayer *mp) { jobject weak_thiz = (jobject) ijkmp_get_weak_thiz(mp); JNI_CHECK_GOTO(weak_thiz, env, NULL, "mpjni: message_loop_n: null weak_thiz", LABEL_RETURN); while (1) { AVMessage msg; int retval = ijkmp_get_msg(mp, &msg, 1); if (retval < 0) break; // block-get should never return 0 assert(retval > 0); switch (msg.what) { case FFP_MSG_PREPARED: MPTRACE("FFP_MSG_PREPARED:/n"); post_event(env, weak_thiz, MEDIA_PREPARED, 0, 0); break; default: ALOGE("unknown FFP_MSG_xxx(%d)/n", msg.what); break; } msg_free_res(&msg); } LABEL_RETURN: ; }
在 ijkmp_get_msg()
方法中通过 ijkmp_change_state_l
将状态改为 MP_STATE_PREPARED
/* need to call msg_free_res for freeing the resouce obtained in msg */ int ijkmp_get_msg(IjkMediaPlayer *mp, AVMessage *msg, int block) { assert(mp); while (1) { int continue_wait_next_msg = 0; int retval = msg_queue_get(&mp->ffplayer->msg_queue, msg, block); if (retval <= 0) return retval; switch (msg->what) { case FFP_MSG_PREPARED: MPTRACE("ijkmp_get_msg: FFP_MSG_PREPARED/n"); pthread_mutex_lock(&mp->mutex); if (mp->mp_state == MP_STATE_ASYNC_PREPARING) { ijkmp_change_state_l(mp, MP_STATE_PREPARED); } else { // FIXME: 1: onError() ? av_log(mp->ffplayer, AV_LOG_DEBUG, "FFP_MSG_PREPARED: expecting mp_state==MP_STATE_ASYNC_PREPARING/n"); } if (!mp->ffplayer->start_on_prepared) { ijkmp_change_state_l(mp, MP_STATE_PAUSED); } pthread_mutex_unlock(&mp->mutex); break; } if (continue_wait_next_msg) { msg_free_res(msg); continue; } return retval; } return -1; }
同时通过 post_event()
将消息传递给 java 层
inline static void post_event(JNIEnv *env, jobject weak_this, int what, int arg1, int arg2) { // MPTRACE("post_event(%p, %p, %d, %d, %d)", (void*)env, (void*) weak_this, what, arg1, arg2); J4AC_IjkMediaPlayer__postEventFromNative(env, weak_this, what, arg1, arg2, NULL); // MPTRACE("post_event()=void"); }
对应到 java 层:
public final class IjkMediaPlayer extends AbstractMediaPlayer { /* * Called from native code when an interesting event happens. This method * just uses the EventHandler system to post the event back to the main app * thread. We use a weak reference to the original IjkMediaPlayer object so * that the native code is safe from the object disappearing from underneath * it. (This is the cookie passed to native_setup().) */ @CalledByNative private static void postEventFromNative(Object weakThiz, int what, int arg1, int arg2, Object obj) { if (weakThiz == null) return; @SuppressWarnings("rawtypes") IjkMediaPlayer mp = (IjkMediaPlayer) ((WeakReference) weakThiz).get(); if (mp == null) { return; } if (what == MEDIA_INFO && arg1 == MEDIA_INFO_STARTED_AS_NEXT) { // this acquires the wakelock if needed, and sets the client side // state mp.start(); } if (mp.mEventHandler != null) { Message m = mp.mEventHandler.obtainMessage(what, arg1, arg2, obj); mp.mEventHandler.sendMessage(m); } } private static class EventHandler extends Handler { private final WeakReference<IjkMediaPlayer> mWeakPlayer; public EventHandler(IjkMediaPlayer mp, Looper looper) { super(looper); mWeakPlayer = new WeakReference<IjkMediaPlayer>(mp); } @Override public void handleMessage(Message msg) { IjkMediaPlayer player = mWeakPlayer.get(); if (player == null || player.mNativeMediaPlayer == 0) { DebugLog.w(TAG, "IjkMediaPlayer went away with unhandled events"); return; } switch (msg.what) { case MEDIA_PREPARED: player.notifyOnPrepared(); return; case MEDIA_PLAYBACK_COMPLETE: player.stayAwake(false); player.notifyOnCompletion(); return; case MEDIA_BUFFERING_UPDATE: long bufferPosition = msg.arg1; if (bufferPosition < 0) { bufferPosition = 0; } long percent = 0; long duration = player.getDuration(); if (duration > 0) { percent = bufferPosition * 100 / duration; } if (percent >= 100) { percent = 100; } // DebugLog.efmt(TAG, "Buffer (%d%%) %d/%d", percent, bufferPosition, duration); 
player.notifyOnBufferingUpdate((int)percent); return; case MEDIA_SEEK_COMPLETE: player.notifyOnSeekComplete(); return; case MEDIA_SET_VIDEO_SIZE: player.mVideoWidth = msg.arg1; player.mVideoHeight = msg.arg2; player.notifyOnVideoSizeChanged(player.mVideoWidth, player.mVideoHeight, player.mVideoSarNum, player.mVideoSarDen); return; case MEDIA_ERROR: DebugLog.e(TAG, "Error (" + msg.arg1 + "," + msg.arg2 + ")"); if (!player.notifyOnError(msg.arg1, msg.arg2)) { player.notifyOnCompletion(); } player.stayAwake(false); return; case MEDIA_INFO: switch (msg.arg1) { case MEDIA_INFO_VIDEO_RENDERING_START: DebugLog.i(TAG, "Info: MEDIA_INFO_VIDEO_RENDERING_START/n"); break; } player.notifyOnInfo(msg.arg1, msg.arg2); // No real default action so far. return; case MEDIA_TIMED_TEXT: if (msg.obj == null) { player.notifyOnTimedText(null); } else { IjkTimedText text = new IjkTimedText(new Rect(0, 0, 1, 1), (String)msg.obj); player.notifyOnTimedText(text); } return; case MEDIA_NOP: // interface test message - ignore break; case MEDIA_SET_VIDEO_SAR: player.mVideoSarNum = msg.arg1; player.mVideoSarDen = msg.arg2; player.notifyOnVideoSizeChanged(player.mVideoWidth, player.mVideoHeight, player.mVideoSarNum, player.mVideoSarDen); break; default: DebugLog.e(TAG, "Unknown message type " + msg.what); } } } }
最终通过 player.notifyOnPrepared()
回调出去