//https://blog.csdn.net/weixin_39799839/article/details/79186034
//https://www.jianshu.com/p/7d9b86919682
//https://www.jianshu.com/p/84151c863c72
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////
IjkMediaPlayer_setVideoSurface
    |--ijkmp_android_set_surface //go to Ijkplayer_android.c

ijkplayer
	native_init()
	native_setup(new WeakReference<IjkMediaPlayer>(this));

	ijkMediaPlayer.setOption(IjkMediaPlayer.OPT_CATEGORY_PLAYER, "mediacodec", 0);
		_setOption(category, name, value)
	ijkMediaPlayer.setOption(IjkMediaPlayer.OPT_CATEGORY_PLAYER, "opensles", 0);
		
	ijkMediaPlayer.setOption(IjkMediaPlayer.OPT_CATEGORY_PLAYER, "overlay-format", IjkMediaPlayer.SDL_FCC_RV32); // SDL_FCC_RV32 = 0x32335652; // RGBX8888
	
	ijkMediaPlayer.setOption(IjkMediaPlayer.OPT_CATEGORY_PLAYER, "framedrop", 1);
	ijkMediaPlayer.setOption(IjkMediaPlayer.OPT_CATEGORY_PLAYER, "start-on-prepared", 0);

	ijkMediaPlayer.setOption(IjkMediaPlayer.OPT_CATEGORY_FORMAT, "http-detect-range-support", 0);

	ijkMediaPlayer.setOption(IjkMediaPlayer.OPT_CATEGORY_CODEC, "skip_loop_filter", 48);

	
	_setDataSource(path, null, null);
	_prepareAsync();
	_start();
	
/////////////////////////////////////////////////////////////////////////////////////////////////////////////////
ijkplayer_jni.c


IjkMediaPlayer_native_init//空操作

IjkMediaPlayer_native_setup

IjkMediaPlayer_prepareAsync
	mp->msg_thread = SDL_CreateThreadEx(&mp->_msg_thread, ijkmp_msg_loop, mp, "ff_msg_loop");
		ijkmp_msg_loop //Ijkplayer_jni.c
			static int message_loop(void *arg)//int ret = mp->msg_loop(arg);
			message_loop(void *arg)			
				message_loop_n(JNIEnv *env, IjkMediaPlayer *mp)			
					int retval = ijkmp_get_msg(mp, &msg, 1);
			
	ffp_prepare_async_l
		VideoState *is = stream_open(ffp, file_name, NULL);
			is->video_refresh_tid = SDL_CreateThreadEx(&is->_video_refresh_tid, video_refresh_thread, ffp, "ff_vout");//视频显示线程
				video_refresh_thread(void *arg)
					video_refresh(ffp, &remaining_time);
						video_display2(ffp);
							video_image_display2(ffp);
								vp = frame_queue_peek_last(&is->pictq);
								SDL_VoutDisplayYUVOverlay(ffp->vout, vp->bmp);
									vout->display_overlay(vout, overlay);

			is->read_tid = SDL_CreateThreadEx(&is->_read_tid, read_thread, ffp, "ff_read");//ff_ffplay.c  获取avPacket
				stream_component_open(ffp, st_index[AVMEDIA_TYPE_AUDIO]);
					audio_open(ffp, channel_layout, nb_channels, sample_rate, &is->audio_tgt))  //ff_ffplay.c
						wanted_spec.callback = sdl_audio_callback;  //ff_ffplay.c  音频重采样
							audio_decode_frame(ffp); //音频重采样
								af = frame_queue_peek_readable(&is->sampq) //从队列获取数据，停止休眠 A2
								
								swr_convert(is->swr_ctx, out, out_count, in, af->frame->nb_samples);// const uint8_t **in = (const uint8_t **)af->frame->extended_data;
																									//uint8_t **out = &is->audio_buf1;
						SDL_AoutOpenAudio(ffp->aout, &wanted_spec, &spec)  //ff_ffplay.c
							aout->open_audio(aout, desired, obtained)
								opaque->audio_tid = SDL_CreateThreadEx(&opaque->_audio_tid, aout_thread, aout, "ff_aout_android");
									aout_thread_n(JNIEnv *env, SDL_Aout *aout)
										SDL_Android_AudioTrack_play
										audio_cblk(userdata, buffer, copy_size);//ijksdl_aout_android_audiotrack.c
										SDL_Android_AudioTrack_write(env, atrack, buffer, copy_size); //
											class_J4AC_android_media_AudioTrack.method_write //audioTrack 播放
					decoder_start(&is->auddec, audio_thread, ffp, "ff_audio_dec")  //ff_ffplay.c 音频解码线程
						SDL_CreateThreadEx(&d->_decoder_tid, fn, arg, name);
							audio_thread
								decoder_decode_frame(ffp, &is->auddec, frame, NULL)
									avcodec_receive_frame(d->avctx, frame);
									packet_queue_get_or_buffering(ffp, d->queue, &pkt, &d->pkt_serial, &d->finished)
										int new_packet = packet_queue_get(q, pkt, 0, serial);//A1
									avcodec_send_packet(d->avctx, &pkt)
								av_frame_move_ref(af->frame, frame);  //复制frame到af->frame
								frame_queue_push(&is->sampq);  //通知唤醒  A2
						
-----------------
IjkMediaPlayer_native_setup
ijkmp_android_create(int(*msg_loop)(void*))
ffpipeline_create_from_android(mp->ffplayer) // ijkplayer_android.c
static SDL_Aout *func_open_audio_output(IJKFF_Pipeline *pipeline, FFPlayer *ffp)
    if (ffp->opensles) {
        aout = SDL_AoutAndroid_CreateForOpenSLES();
    } else {   //ijkMediaPlayer.setOption(IjkMediaPlayer.OPT_CATEGORY_PLAYER, "opensles", 0);
        aout = SDL_AoutAndroid_CreateForAudioTrack();
    }
	
	ffpipeline_android.c
	SDL_AoutAndroid_CreateForOpenSLES
		aout->open_audio   = aout_open_audio;
	SDL_AoutAndroid_CreateForAudioTrack  //Ijksdl_aout_android_audiotrack.c
		aout->open_audio   = aout_open_audio;//ijksdl_aout_android_audiotrack.c
			audio_cblk(userdata, buffer, copy_size);// sdl_audio_callback 音频重采样

ffpipeline_create_from_android(mp->ffplayer) // ijkplayer_android.c
	pipeline->func_destroy              = func_destroy;             //ffpipeline_android.c
	pipeline->func_open_video_decoder   = func_open_video_decoder;
	pipeline->func_open_audio_output    = func_open_audio_output;
	pipeline->func_init_video_decoder   = func_init_video_decoder;
	pipeline->func_config_video_decoder = func_config_video_decoder;
		static IJKFF_Pipenode *func_open_video_decoder(IJKFF_Pipeline *pipeline, FFPlayer *ffp) //ffpipeline_android.c
			if (ffp->mediacodec_all_videos || ffp->mediacodec_avc || ffp->mediacodec_hevc || ffp->mediacodec_mpeg2)//硬解
				node = ffpipenode_create_video_decoder_from_android_mediacodec(ffp, pipeline, opaque->weak_vout);//Ffpipenode_android_mediacodec_vdec.c
					if (ffp->mediacodec_sync) {
						node->func_run_sync = func_run_sync_loop;
					} else {
						node->func_run_sync = func_run_sync;//使用这个
							func_run_sync(IJKFF_Pipenode *node)//ff_video_dec 线程中  Ffpipenode_android_mediacodec_vdec.c
								opaque->enqueue_thread = SDL_CreateThreadEx(&opaque->_enqueue_thread, enqueue_thread_func, node, "amediacodec_input_thread");
									feed_input_buffer(env, node, AMC_INPUT_TIMEOUT_US, &dequeue_count);
										ffp_packet_queue_get_or_buffering(ffp, d->queue, &pkt, &d->pkt_serial, &d->finished)
										convert_h264_to_annexb(d->pkt_temp.data, d->pkt_temp.size, opaque->nal_size, &convert_state);
										SDL_AMediaCodecFake_flushFakeFrames(opaque->acodec);
										copy_size = SDL_AMediaCodec_writeInputData(opaque->acodec, input_buffer_index, d->pkt_temp.data, d->pkt_temp.size);
										amc_ret = SDL_AMediaCodec_queueInputBuffer(opaque->acodec, input_buffer_index, 0, copy_size, time_stamp, queue_flags);
									
								ret = drain_output_buffer(env, node, timeUs, &dequeue_count, frame, &got_frame);
									int ret = drain_output_buffer_l(env, node, timeUs, dequeue_count, frame, got_frame);
										output_buffer_index = SDL_AMediaCodecFake_dequeueOutputBuffer(opaque->acodec, &bufferInfo, timeUs);//Ffpipenode_android_mediacodec_vdec.c
											acodec->func_dequeueOutputBuffer(acodec, info, timeoutUs);//Ijksdl_codec_android_mediacodec.c
												SDL_AMediaCodecJava_init(JNIEnv *env, jobject android_media_codec)
													acodec->func_dequeueOutputBuffer    = SDL_AMediaCodecJava_dequeueOutputBuffer;//ijksdl_codec_android_mediacodec_java.c //use
														J4AC_MediaCodec__BufferInfo__BufferInfo__asGlobalRef__catchAll
														J4AC_MediaCodec__dequeueOutputBuffer
														J4A_ExceptionCheck__catchAll
														J4AC_MediaCodec__BufferInfo__offset__get__catchAll
														J4AC_MediaCodec__BufferInfo__size__get__catchAll
														J4AC_MediaCodec__BufferInfo__presentationTimeUs__get__catchAll
														J4AC_MediaCodec__BufferInfo__flags__get__catchAll
								ffp_queue_picture(ffp, frame, pts, duration, av_frame_get_pkt_pos(frame), is->viddec.pkt_serial);
									vp = frame_queue_peek_writable(&is->pictq)
									vp->sar = src_frame->sample_aspect_ratio;
									SDL_VoutLockYUVOverlay(vp->bmp);
									SDL_VoutFillFrameYUVOverlay(vp->bmp, src_frame)
									SDL_VoutUnlockYUVOverlay(vp->bmp);
										overlay->unlock(overlay);
									frame_queue_push(&is->pictq);//入列
									ffp_notify_msg1(ffp, FFP_MSG_VIDEO_DECODED_START);
										
								SDL_VoutAndroid_releaseBufferProxyP(opaque->weak_vout, (SDL_AMediaCodecBufferProxy **)&frame->opaque, false);
								
													
					}
					opaque->acodec = SDL_AMediaCodecJava_createByCodecName(env, ffp->mediacodec_default_name);
						J4AC_android_media_MediaCodec__createByCodecName__withCString__catchAll
							(*env)->CallStaticObjectMethod(env, class_J4AC_android_media_MediaCodec.id, class_J4AC_android_media_MediaCodec.method_createByCodecName, name);
						SDL_AMediaCodec* acodec = SDL_AMediaCodecJava_init(env, android_media_codec);
							SDL_AMediaCodec_Opaque *opaque = acodec->opaque;
							opaque->android_media_codec         = global_android_media_codec;

							acodec->opaque_class                = &g_amediacodec_class;
							acodec->func_delete                 = SDL_AMediaCodecJava_delete;
							acodec->func_configure              = NULL;
							acodec->func_configure_surface      = SDL_AMediaCodecJava_configure_surface;

							acodec->func_start                  = SDL_AMediaCodecJava_start;
							acodec->func_stop                   = SDL_AMediaCodecJava_stop;
							acodec->func_flush                  = SDL_AMediaCodecJava_flush;
							acodec->func_flush                  = SDL_AMediaCodecJava_flush;// NOTE(review): duplicate of the line above — copy/paste slip in these notes

							acodec->func_writeInputData         = SDL_AMediaCodecJava_writeInputData;

							acodec->func_dequeueInputBuffer     = SDL_AMediaCodecJava_dequeueInputBuffer;
							acodec->func_queueInputBuffer       = SDL_AMediaCodecJava_queueInputBuffer;

							acodec->func_dequeueOutputBuffer    = SDL_AMediaCodecJava_dequeueOutputBuffer;//use
							acodec->func_getOutputFormat        = SDL_AMediaCodecJava_getOutputFormat;
							acodec->func_releaseOutputBuffer    = SDL_AMediaCodecJava_releaseOutputBuffer;

							acodec->func_isInputBuffersValid    = SDL_AMediaCodecJava_isInputBuffersValid;
			if (!node) {
				node = ffpipenode_create_video_decoder_from_ffplay(ffp);//软解
					node->func_run_sync = func_run_sync; //ffpipenode_ffplay_vdec.c
						ffp_video_thread(opaque->ffp)    //ff_ffplay.c
							ffplay_video_thread(void *arg)
								get_video_frame(ffp, frame)
								ret = queue_picture(ffp, frame, pts, duration, frame->pkt_pos, is->viddec.pkt_serial);
									vp = frame_queue_peek_writable(&is->pictq)
									alloc_picture(ffp, src_frame->format); //ff_ffplay.c
									    SDL_VoutSetOverlayFormat(ffp->vout, ffp->overlay_format);
										vp->bmp = SDL_Vout_CreateOverlay(vp->width, vp->height,
																	   frame_format,
																	   ffp->vout);
											func_create_overlay //ijksdl_vout_android_nativewindow.c
												func_create_overlay_l(width, height, frame_format, vout);
													SDL_VoutFFmpeg_CreateOverlay
														opaque_setup_frame(opaque, ff_format, buf_width, buf_height);
															    av_image_fill_arrays(managed_frame->data, managed_frame->linesize, NULL,
																						format, width, height, 1);
														overlay_fill(overlay, opaque->managed_frame, opaque->planes);
															overlay->pixels[i] = frame->data[i];
															overlay->pitches[i] = frame->linesize[i];
										vp->allocated = 1;
										SDL_CondSignal(is->pictq.cond);
									SDL_VoutLockYUVOverlay(vp->bmp);
										overlay->lock               = func_lock;//Ijksdl_vout_overlay_ffmpeg.c
											SDL_LockMutex(opaque->mutex);
									SDL_VoutFillFrameYUVOverlay(vp->bmp, src_frame) 
										func_fill_frame(SDL_VoutOverlay *overlay, const AVFrame *frame)//Ijksdl_vout_overlay_ffmpeg.c
										    case SDL_FCC_YV12:
												need_swap_uv = 1;
												use_linked_frame = 1;
												dst_format = frame->format;      // linked-frame path: decoded frame usable directly
												dst_format = AV_PIX_FMT_YUV420P; // NOTE(review): these two assignments are alternative if/else branches in func_fill_frame, flattened in these notes — confirm against ijksdl_vout_overlay_ffmpeg.c
												overlay_fill(overlay, opaque->linked_frame, opaque->planes);//直接保存yuv 分量
													overlay->pixels[i] = frame->data[i];
													overlay->pitches[i] = frame->linesize[i];
												if (need_swap_uv)
													FFSWAP(Uint8*, overlay->pixels[1], overlay->pixels[2]);
											case SDL_FCC_RV32:
												dst_format = AV_PIX_FMT_0BGR32;
											AVFrame* managed_frame = opaque_obtain_managed_frame_buffer(opaque);//yuv -> rgb
												av_image_get_buffer_size(managed_frame->format, managed_frame->width, managed_frame->height, 1);
												av_image_fill_arrays(managed_frame->data, managed_frame->linesize,
													frame_buffer_ref->data, managed_frame->format, managed_frame->width, managed_frame->height, 1);
											overlay_fill(overlay, opaque->managed_frame, opaque->planes);
											    overlay->pixels[i] = frame->data[i];
												overlay->pitches[i] = frame->linesize[i];
											ijk_image_convert(frame->width, frame->height,
													dst_format, swscale_dst_pic.data, swscale_dst_pic.linesize,
													frame->format, (const uint8_t**) frame->data, frame->linesize)
												            case AV_PIX_FMT_0BGR32:
												I420ToABGR(
													src_data[0], src_linesize[0],
													src_data[1], src_linesize[1],
													src_data[2], src_linesize[2],
													dst_data[0], dst_linesize[0],//先使用libyuv转换，如果失败，使用ffmpeg api转换
													width, height);
											opaque->img_convert_ctx = sws_getCachedContext(opaque->img_convert_ctx,
													frame->width, frame->height, frame->format, frame->width, frame->height,
													dst_format, opaque->sws_flags, NULL, NULL, NULL);
													SDL_VoutUnlockYUVOverlay(vp->bmp);
											sws_scale(opaque->img_convert_ctx, (const uint8_t**) frame->data, frame->linesize,
													0, frame->height, swscale_dst_pic.data, swscale_dst_pic.linesize);
													
												
										SDL_UnlockMutex(opaque->mutex);
									frame_queue_push(&is->pictq);
					ffp_set_video_codec_info(ffp, AVCODEC_MODULE_NAME, avcodec_get_name(ffp->is->viddec.avctx->codec_id));
					
			}

			
mp->ffplayer->vout = SDL_VoutAndroid_CreateForAndroidSurface(); //ijkplayer_android.c
	SDL_VoutAndroid_CreateForANativeWindow() //ijksdl_vout_android_nativewindow.c
		opaque->egl = IJK_EGL_create();
		if (!opaque->egl)
			goto fail;

		vout->opaque_class    = &g_nativewindow_class;
		vout->create_overlay  = func_create_overlay;
		vout->free_l          = func_free_l;
		vout->display_overlay = func_display_overlay;
			func_display_overlay_l(vout, overlay);
				//硬解
				case SDL_FCC__AMC: {//硬解，走这里，直接渲染到surface，不用处理解码后的数据
				// only ANativeWindow support
				IJK_EGL_terminate(opaque->egl);
				return SDL_VoutOverlayAMediaCodec_releaseFrame_l(overlay, NULL, true);
			
				//软解并且直接显示
				case SDL_FCC_RV32: {//之前已经将yuv转为rgba了
					SDL_Android_NativeWindow_display_l(native_window, overlay); 
						int render_ret = voutDesc->render(&out_buffer, overlay);
							android_render_on_rgb8888 //android_nativewindow.c
								android_render_rgb_on_rgb(out_buffer, overlay, 32);
									memcpy(dst_pixels, src_pixels, plane_size);//显示
									av_image_copy_plane(dst_pixels, dst_line_size, src_pixels, src_line_size, bytewidth, min_height);//显示
			
				case SDL_FCC_YV12://软解 OpenGL ES 渲染  YYYYYYYY VV UU    =>YUV420P   planar
				 if (vout->overlay_format == SDL_FCC__GLES2 && opaque->egl)
					return IJK_EGL_display(opaque->egl, native_window, overlay);
						IJK_EGL_display(opaque->egl, native_window, overlay);
							IJK_EGL_makeCurrent(egl, window) //ijksdl_egl.c
								EGLDisplay display = eglGetDisplay(EGL_DEFAULT_DISPLAY);
								eglInitialize(display, &major, &minor)
								eglChooseConfig(display, configAttribs, &config, 1, &numConfig)
								eglGetConfigAttrib(display, config, EGL_NATIVE_VISUAL_ID, &native_visual_id)
								EGLSurface surface = eglCreateWindowSurface(display, config, window, NULL);
								EGLContext context = eglCreateContext(display, config, EGL_NO_CONTEXT, contextAttribs);// eglCreateContext returns EGLContext, not EGLSurface
								eglMakeCurrent(display, surface, surface, context)
								IJK_GLES2_Renderer_setupGLES();
									glClearColor(0.0f, 0.0f, 0.0f, 1.0f);       IJK_GLES2_checkError_TRACE("glClearColor");
									glEnable(GL_CULL_FACE);                     IJK_GLES2_checkError_TRACE("glEnable(GL_CULL_FACE)");
									glCullFace(GL_BACK);                        IJK_GLES2_checkError_TRACE("glCullFace");
									glDisable(GL_DEPTH_TEST);
								egl->context = context;
								egl->surface = surface;
								egl->display = display;
							IJK_EGL_display_internal(egl, window, overlay);
								IJK_EGL_prepareRenderer(egl, overlay)
									IJK_GLES2_Renderer_reset(opaque->renderer);
										glDeleteShader(renderer->vertex_shader);
										glDeleteShader(renderer->fragment_shader);
										glDeleteProgram(renderer->program);
										renderer->vertex_shader   = 0;
										renderer->fragment_shader = 0;
										renderer->program         = 0;
										glDeleteTextures(1, &renderer->plane_textures[i]);
										renderer->plane_textures[i] = 0;
									IJK_GLES2_Renderer_freeP(&opaque->renderer);
									opaque->renderer = IJK_GLES2_Renderer_create(overlay);
										case SDL_FCC_YV12:      renderer = IJK_GLES2_Renderer_create_yuv420p(); break;
											IJK_GLES2_Renderer *renderer = IJK_GLES2_Renderer_create_base(IJK_GLES2_getFragmentShader_yuv420p());
												========== IJK_GLES2_getFragmentShader_yuv420p()
													precision highp float;
													varying   highp vec2 vv2_Texcoord;
													uniform         mat3 um3_ColorConversion;
													uniform   lowp  sampler2D us2_SamplerX;
													uniform   lowp  sampler2D us2_SamplerY;
													uniform   lowp  sampler2D us2_SamplerZ;

													void main()
													{
														mediump vec3 yuv;
														lowp    vec3 rgb;

														yuv.x = (texture2D(us2_SamplerX, vv2_Texcoord).r - (16.0 / 255.0));
														yuv.y = (texture2D(us2_SamplerY, vv2_Texcoord).r - 0.5);
														yuv.z = (texture2D(us2_SamplerZ, vv2_Texcoord).r - 0.5);
														rgb = um3_ColorConversion * yuv;
														gl_FragColor = vec4(rgb, 1);
													}
												==========
												renderer->vertex_shader = IJK_GLES2_loadShader(GL_VERTEX_SHADER, IJK_GLES2_getVertexShader_default());
													======== IJK_GLES2_getVertexShader_default
													precision highp float;
													varying   highp vec2 vv2_Texcoord;
													attribute highp vec4 av4_Position;
													attribute highp vec2 av2_Texcoord;
													uniform         mat4 um4_ModelViewProjection;

													void main()
													{
														gl_Position  = um4_ModelViewProjection * av4_Position;
														vv2_Texcoord = av2_Texcoord.xy;
													}
													========
												renderer->fragment_shader = IJK_GLES2_loadShader(GL_FRAGMENT_SHADER, fragment_shader_source);
												renderer->program = glCreateProgram(); 
												glAttachShader(renderer->program, renderer->vertex_shader);
												glAttachShader(renderer->program, renderer->fragment_shader);
												glLinkProgram(renderer->program);
												glGetProgramiv(renderer->program, GL_LINK_STATUS, &link_status);
												renderer->av4_position = glGetAttribLocation(renderer->program, "av4_Position");
												renderer->av2_texcoord = glGetAttribLocation(renderer->program, "av2_Texcoord");
												renderer->um4_mvp      = glGetUniformLocation(renderer->program, "um4_ModelViewProjection");
											if (!renderer)
												goto fail;

											renderer->us2_sampler[0] = glGetUniformLocation(renderer->program, "us2_SamplerX");
											renderer->us2_sampler[1] = glGetUniformLocation(renderer->program, "us2_SamplerY");
											renderer->us2_sampler[2] = glGetUniformLocation(renderer->program, "us2_SamplerZ");
											renderer->um3_color_conversion = glGetUniformLocation(renderer->program, "um3_ColorConversion");

											renderer->func_use            = yuv420p_use;
												yuv420p_use(IJK_GLES2_Renderer *renderer)//renderer_yuv420p.c
											renderer->func_getBufferWidth = yuv420p_getBufferWidth;
												yuv420p_getBufferWidth(IJK_GLES2_Renderer *renderer, SDL_VoutOverlay *overlay)//renderer_yuv420p.c
													overlay->pitches[0] / 1
											renderer->func_uploadTexture  = yuv420p_uploadTexture;
												yuv420p_uploadTexture(IJK_GLES2_Renderer *renderer, SDL_VoutOverlay *overlay)//renderer_yuv420p.c

									IJK_GLES2_Renderer_use(opaque->renderer)
										renderer->func_use(renderer)
											yuv420p_use(IJK_GLES2_Renderer *renderer)//renderer_yuv420p.c
												glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
												glUseProgram(renderer->program); 
												glGenTextures(3, renderer->plane_textures);
												
												glActiveTexture(GL_TEXTURE0 + i);//0 1 2
												glBindTexture(GL_TEXTURE_2D, renderer->plane_textures[i]);
												glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
												glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
												glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
												glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
												glUniform1i(renderer->us2_sampler[i], i);
												
												glUniformMatrix3fv(renderer->um3_color_conversion, 1, GL_FALSE, IJK_GLES2_getColorMatrix_bt709());
													=====  IJK_GLES2_getColorMatrix_bt709()
													static const GLfloat g_bt709[] = {
														1.164,  1.164,  1.164,
														0.0,   -0.213,  2.112,
														1.793, -0.533,  0.0,
													};
													const GLfloat *IJK_GLES2_getColorMatrix_bt709()
													{
														return g_bt709;
													}
													=====
											IJK_GLES2_loadOrtho(&modelViewProj, -1.0f, 1.0f, -1.0f, 1.0f, -1.0f, 1.0f);//初始化 modelViewProj.m
												matrix->m[0] = 2.0f / r_l;// 1
												matrix->m[1] = 0.0f;
												matrix->m[2] = 0.0f;
												matrix->m[3] = 0.0f;

												matrix->m[4] = 0.0f;
												matrix->m[5] = 2.0f / t_b;// 1
												matrix->m[6] = 0.0f;
												matrix->m[7] = 0.0f;

												matrix->m[8] = 0.0f;
												matrix->m[9] = 0.0f;
												matrix->m[10] = -2.0f / f_n; // -1
												matrix->m[11] = 0.0f;

												matrix->m[12] = tx; // 0
												matrix->m[13] = ty; // 0
												matrix->m[14] = tz; // 0
												matrix->m[15] = 1.0f;
											glUniformMatrix4fv(renderer->um4_mvp, 1, GL_FALSE, modelViewProj.m);
											IJK_GLES2_Renderer_TexCoords_reset(renderer);
												renderer->texcoords[0] = 0.0f;
												renderer->texcoords[1] = 1.0f;
												renderer->texcoords[2] = 1.0f;
												renderer->texcoords[3] = 1.0f;
												renderer->texcoords[4] = 0.0f;
												renderer->texcoords[5] = 0.0f;
												renderer->texcoords[6] = 1.0f;
												renderer->texcoords[7] = 0.0f;
											IJK_GLES2_Renderer_TexCoords_reloadVertex(renderer);
												glVertexAttribPointer(renderer->av2_texcoord, 2, GL_FLOAT, GL_FALSE, 0, renderer->texcoords); 
												glEnableVertexAttribArray(renderer->av2_texcoord);
											IJK_GLES2_Renderer_Vertices_reset(renderer);
												renderer->vertices[0] = -1.0f;
												renderer->vertices[1] = -1.0f;
												renderer->vertices[2] =  1.0f;
												renderer->vertices[3] = -1.0f;
												renderer->vertices[4] = -1.0f;
												renderer->vertices[5] =  1.0f;
												renderer->vertices[6] =  1.0f;
												renderer->vertices[7] =  1.0f;
											IJK_GLES2_Renderer_Vertices_reloadVertex(renderer);
												glVertexAttribPointer(renderer->av4_position, 2, GL_FLOAT, GL_FALSE, 0, renderer->vertices);
												glEnableVertexAttribArray(renderer->av4_position);
									IJK_EGL_setSurfaceSize(egl, overlay->w, overlay->h)
										egl->width  = IJK_EGL_getSurfaceWidth(egl);
											eglQuerySurface(egl->display, egl->surface, EGL_WIDTH, &width)
										egl->height = IJK_EGL_getSurfaceHeight(egl);
											eglQuerySurface(egl->display, egl->surface, EGL_HEIGHT, &height)
									glViewport(0, 0, egl->width, egl->height);  IJK_GLES2_checkError_TRACE("glViewport");//上面的调用一次
								IJK_GLES2_Renderer_renderOverlay(opaque->renderer, overlay) //绘制 renderer.c
									glClear(GL_COLOR_BUFFER_BIT);
									renderer->last_buffer_width = renderer->func_getBufferWidth(renderer, overlay);
									renderer->func_uploadTexture(renderer, overlay)
										yuv420p_uploadTexture //renderer_yuv420p.c
										    glBindTexture(GL_TEXTURE_2D, renderer->plane_textures[i]);//i   0 1 2 渲染yuv
											glTexImage2D(GL_TEXTURE_2D,0,GL_LUMINANCE,widths[plane],heights[plane],0,GL_LUMINANCE,GL_UNSIGNED_BYTE,pixels[plane]);
											
									IJK_GLES2_Renderer_Vertices_apply(renderer);
									IJK_GLES2_Renderer_Vertices_reloadVertex(renderer);
									IJK_GLES2_Renderer_TexCoords_reset(renderer);
									IJK_GLES2_Renderer_TexCoords_cropRight(renderer, padding_normalized);
									IJK_GLES2_Renderer_TexCoords_reloadVertex(renderer);
									
									glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
								eglSwapBuffers(egl->display, egl->surface);
								
--------------------					
				ret = stream_component_open(ffp, st_index[AVMEDIA_TYPE_VIDEO]);//"ff_read"  //ff_ffplay.c
					decoder_init(&is->viddec, avctx, &is->videoq, is->continue_read_thread);
					ffp->node_vdec = ffpipeline_open_video_decoder(ffp->pipeline, ffp);
						pipeline->func_open_video_decoder(pipeline, ffp); //获取硬解java层解码器名称
					decoder_start(&is->viddec, video_thread, ffp, "ff_video_dec")
						video_thread
							ffpipenode_run_sync(ffp->node_vdec);
								node->func_run_sync(node);

							
		
				ret = av_read_frame(ic, pkt);//ff_read 线程当中
				packet_queue_put(&is->audioq, pkt);//A1
				packet_queue_put(&is->videoq, pkt);
				packet_queue_put(&is->subtitleq, pkt);
/////////////////////////////////////////////////////////////////////////////////////////////		
_start();
IjkMediaPlayer_start //ijkplayer_jni.c
	ijkmp_start(IjkMediaPlayer *mp) //ijkplayer.c
		ijkmp_start_l(mp);
			ffp_notify_msg1(mp->ffplayer, FFP_REQ_START);
				ijkmp_get_msg //Ijkplayer.c 
					case FFP_REQ_START:
						ijkmp_change_state_l(mp, MP_STATE_STARTED);
							ffp_notify_msg1(mp->ffplayer, FFP_MSG_PLAYBACK_STATE_CHANGED);
				
IjkMediaPlayer_native_setup //初始化函数指针
	mp->msg_loop = msg_loop;			



JNI_OnLoad
ijkmp_global_init();

ijkplayer.c
ijkmp_global_init

ff_ffplay.c
ffp_global_init

allformats.c
#ifdef __ANDROID__
    IJK_REGISTER_PROTOCOL(ijkmediadatasource);
#endif
    IJK_REGISTER_PROTOCOL(ijkio);
    IJK_REGISTER_PROTOCOL(async);
    IJK_REGISTER_PROTOCOL(ijklongurl);
    IJK_REGISTER_PROTOCOL(ijktcphook);
    IJK_REGISTER_PROTOCOL(ijkhttphook);
    IJK_REGISTER_PROTOCOL(ijksegment);
    /* demuxers */
    IJK_REGISTER_DEMUXER(ijklivehook);


/*
 * IJK_REGISTER_PROTOCOL(x): registers the custom URLProtocol "x" with FFmpeg.
 * Inside a block scope it declares the protocol table (ijkimp_ff_<x>_protocol)
 * and the matching register function, then calls it, passing
 * sizeof(URLProtocol) so the callee can reject an ABI mismatch.
 */
#define IJK_REGISTER_PROTOCOL(x)                                        \
{                                                                   \
	extern URLProtocol ijkimp_ff_##x##_protocol;                        \
	int ijkav_register_##x##_protocol(URLProtocol *protocol, int protocol_size);\
	ijkav_register_##x##_protocol(&ijkimp_ff_##x##_protocol, sizeof(URLProtocol));  \
}
	
ijkmediadatasource.c
{
	extern URLProtocol ijkimp_ff_ijkmediadatasource_protocol;						\
	int ijkav_register_ijkmediadatasource_protocol(URLProtocol *protocol, int protocol_size);\
	ijkav_register_ijkmediadatasource_protocol(&ijkimp_ff_ijkmediadatasource_protocol, sizeof(URLProtocol));	\
}

ijkio.c
{																	\
	extern URLProtocol ijkimp_ff_ijkio_protocol;						\
	int ijkav_register_ijkio_protocol(URLProtocol *protocol, int protocol_size);\
	ijkav_register_ijkio_protocol(&ijkimp_ff_ijkio_protocol, sizeof(URLProtocol));	\
}

ijkasync.c
{                                                                   \
	extern URLProtocol ijkimp_ff_async_protocol;                        \
	int ijkav_register_async_protocol(URLProtocol *protocol, int protocol_size);\
	ijkav_register_async_protocol(&ijkimp_ff_async_protocol, sizeof(URLProtocol));  \
}

ijklongurl.c
{                                                                   \
	extern URLProtocol ijkimp_ff_ijklongurl_protocol;                        \
	int ijkav_register_ijklongurl_protocol(URLProtocol *protocol, int protocol_size);\
	ijkav_register_ijklongurl_protocol(&ijkimp_ff_ijklongurl_protocol, sizeof(URLProtocol));  \
}

ijkurlhook.c
{                                                                   \
	extern URLProtocol ijkimp_ff_ijktcphook_protocol;                        \
	int ijkav_register_ijktcphook_protocol(URLProtocol *protocol, int protocol_size);\
	ijkav_register_ijktcphook_protocol(&ijkimp_ff_ijktcphook_protocol, sizeof(URLProtocol));  \
}

ijkurlhook.c
{                                                                   \
	extern URLProtocol ijkimp_ff_ijkhttphook_protocol;                        \
	int ijkav_register_ijkhttphook_protocol(URLProtocol *protocol, int protocol_size);\
	ijkav_register_ijkhttphook_protocol(&ijkimp_ff_ijkhttphook_protocol, sizeof(URLProtocol));  \
}

ijksegment.c
{                                                                   \
	extern URLProtocol ijkimp_ff_ijksegment_protocol;                        \
	int ijkav_register_ijksegment_protocol(URLProtocol *protocol, int protocol_size);\
	ijkav_register_ijksegment_protocol(&ijkimp_ff_ijksegment_protocol, sizeof(URLProtocol));  \
}

/////////////////////////////////////////////////////////////////////////////////////////////////////////////////

/*
 * IJK_REGISTER_DEMUXER(x): registers the custom demuxer "x" with FFmpeg by
 * declaring its AVInputFormat table (ijkff_<x>_demuxer) and handing it to
 * ijkav_register_input_format(). Expanded manually for ijklivehook below.
 */
#define IJK_REGISTER_DEMUXER(x)                                         \
    {                                                                   \
        extern AVInputFormat ijkff_##x##_demuxer;                       \
        ijkav_register_input_format(&ijkff_##x##_demuxer);              \
    }

	
IJK_REGISTER_DEMUXER(ijklivehook);

    {                                                                   \
        extern AVInputFormat ijkff_ijklivehook_demuxer;                       \
        ijkav_register_input_format(&ijkff_ijklivehook_demuxer);              \
    }


/////////////////////////////////////////////////////////////////////////////////////////////////////////////////
Ijkutils.c

IJK_FF_PROTOCOL(async);
/*
 * IJK_FF_PROTOCOL(x): defines ijkav_register_<x>_protocol(), which copies the
 * caller-supplied URLProtocol table over FFmpeg's ff_<x>_protocol slot after
 * verifying the struct size matches (cheap ABI-compatibility check).
 */
#define IJK_FF_PROTOCOL(x)                                                                          \
extern URLProtocol ff_##x##_protocol;                                                               \
int ijkav_register_##x##_protocol(URLProtocol *protocol, int protocol_size);                        \
int ijkav_register_##x##_protocol(URLProtocol *protocol, int protocol_size)                         \
{                                                                                                   \
    if (protocol_size != sizeof(URLProtocol)) {                                                     \
        /* ## does not paste inside a string literal; stringize (#x) so the log */                  \
        /* carries the real protocol name instead of the literal "##x##".       */                  \
        av_log(NULL, AV_LOG_ERROR, "ijkav_register_" #x "_protocol: ABI mismatch.\n");              \
        return -1;                                                                                  \
    }                                                                                               \
    memcpy(&ff_##x##_protocol, protocol, protocol_size);                                            \
    return 0;                                                                                       \
}

extern URLProtocol ff_async_protocol;                                                               \
int ijkav_register_async_protocol(URLProtocol *protocol, int protocol_size);                        \
int ijkav_register_async_protocol(URLProtocol *protocol, int protocol_size)                         \
{                                                                                                   \
    if (protocol_size != sizeof(URLProtocol)) {                                                     \
        av_log(NULL, AV_LOG_ERROR, "ijkav_register_async_protocol: ABI mismatch.\n");               \
        return -1;                                                                                  \
    }                                                                                               \
    memcpy(&ff_async_protocol, protocol, protocol_size);                                            \
    return 0;                                                                                       \
}


//   ##   token-pasting operator: joins preprocessor tokens into one identifier
//   #@   charizing operator: wraps the argument in single quotes -> character constant (MSVC extension, not standard C)
//   #    stringizing operator: wraps the argument in double quotes -> string literal

IJK_DUMMY_PROTOCOL(ijkmediadatasource);
// Generates a placeholder ("dummy") URLProtocol named <x>:
//  - IJK_FF_PROTOCOL(x) emits the ijkav_register_<x>_protocol() function
//    that lets a real implementation be copied over this stub later;
//  - an AVClass is defined so the protocol participates in FFmpeg's
//    option/logging machinery (class_name is the stringized argument);
//  - ff_<x>_protocol's open callback is ijkdummy_open (defined elsewhere),
//    so the protocol is inert until a real one is registered over it.
// #x stringizes the argument; ## pastes it into identifiers.
#define IJK_DUMMY_PROTOCOL(x)                                       \
IJK_FF_PROTOCOL(x);                                                 \
static const AVClass ijk_##x##_context_class = {                    \
    .class_name = #x,                                               \
    .item_name  = av_default_item_name,                             \
    .version    = LIBAVUTIL_VERSION_INT,                            \
    };                                                              \
                                                                    \
URLProtocol ff_##x##_protocol = {                                   \
    .name                = #x,                                      \
    .url_open2           = ijkdummy_open,                           \
    .priv_data_size      = 1,                                       \
    .priv_data_class     = &ijk_##x##_context_class,                \
};

// Generates, for protocol <x>:
//   extern URLProtocol ff_<x>_protocol;                 (linked-in slot)
//   int ijkav_register_<x>_protocol(URLProtocol *, int) (decl + def)
// The function overwrites the linked-in ff_<x>_protocol with the
// caller-supplied descriptor via memcpy, after verifying that both sides
// agree on sizeof(URLProtocol) — a cheap ABI-compatibility guard.
// Returns 0 on success, -1 on size mismatch.
//
// FIX: the original log string was "ijkav_register_##x##_protocol"; the
// ## operator does not operate inside string literals, so the log printed
// the literal text "##x##".  Use the # stringizing operator plus
// string-literal concatenation to embed the real protocol name.
#define IJK_FF_PROTOCOL(x)                                                                          \
extern URLProtocol ff_##x##_protocol;                                                               \
int ijkav_register_##x##_protocol(URLProtocol *protocol, int protocol_size);                        \
int ijkav_register_##x##_protocol(URLProtocol *protocol, int protocol_size)                         \
{                                                                                                   \
    if (protocol_size != sizeof(URLProtocol)) {                                                     \
        av_log(NULL, AV_LOG_ERROR, "ijkav_register_" #x "_protocol: ABI mismatch.\n");              \
        return -1;                                                                                  \
    }                                                                                               \
    memcpy(&ff_##x##_protocol, protocol, protocol_size);                                            \
    return 0;                                                                                       \
}


extern URLProtocol ff_ijkmediadatasource_protocol;                                                               \
int ijkav_register_ijkmediadatasource_protocol(URLProtocol *protocol, int protocol_size);                        \
int ijkav_register_ijkmediadatasource_protocol(URLProtocol *protocol, int protocol_size)                         \
{                                                                                                   \
    if (protocol_size != sizeof(URLProtocol)) {                                                     \
        av_log(NULL, AV_LOG_ERROR, "ijkav_register_ijkmediadatasource_protocol: ABI mismatch.\n");               \
        return -1;                                                                                  \
    }                                                                                               \
    memcpy(&ff_ijkmediadatasource_protocol, protocol, protocol_size);                                            \
    return 0;                                                                                       \
}
static const AVClass ijk_ijkmediadatasource_context_class = {                    \
    .class_name = "ijkmediadatasource",                                               \
    .item_name  = av_default_item_name,                             \
    .version    = LIBAVUTIL_VERSION_INT,                            \
    };                                                              \
                                                                    \
URLProtocol ff_ijkmediadatasource_protocol = {                                   \
    .name                = "ijkmediadatasource",                                      \
    .url_open2           = ijkdummy_open,                           \
    .priv_data_size      = 1,                                       \
    .priv_data_class     = &ijk_ijkmediadatasource_context_class,                \
};


IJK_DUMMY_PROTOCOL(ijkhttphook);

extern URLProtocol ff_ijkhttphook_protocol;                                                               \
int ijkav_register_ijkhttphook_protocol(URLProtocol *protocol, int protocol_size);                        \
int ijkav_register_ijkhttphook_protocol(URLProtocol *protocol, int protocol_size)                         \
{                                                                                                   \
    if (protocol_size != sizeof(URLProtocol)) {                                                     \
        av_log(NULL, AV_LOG_ERROR, "ijkav_register_ijkhttphook_protocol: ABI mismatch.\n");               \
        return -1;                                                                                  \
    }                                                                                               \
    memcpy(&ff_ijkhttphook_protocol, protocol, protocol_size);                                            \
    return 0;                                                                                       \
}
static const AVClass ijk_ijkhttphook_context_class = {                    \
    .class_name = "ijkhttphook",                                               \
    .item_name  = av_default_item_name,                             \
    .version    = LIBAVUTIL_VERSION_INT,                            \
    };                                                              \
                                                                    \
URLProtocol ff_ijkhttphook_protocol = {                                   \
    .name                = "ijkhttphook",                                      \
    .url_open2           = ijkdummy_open,                           \
    .priv_data_size      = 1,                                       \
    .priv_data_class     = &ijk_ijkhttphook_context_class,                \
};


IJK_DUMMY_PROTOCOL(ijklongurl);

extern URLProtocol ff_ijklongurl_protocol;                                                               \
int ijkav_register_ijklongurl_protocol(URLProtocol *protocol, int protocol_size);                        \
int ijkav_register_ijklongurl_protocol(URLProtocol *protocol, int protocol_size)                         \
{                                                                                                   \
    if (protocol_size != sizeof(URLProtocol)) {                                                     \
        av_log(NULL, AV_LOG_ERROR, "ijkav_register_ijklongurl_protocol: ABI mismatch.\n");               \
        return -1;                                                                                  \
    }                                                                                               \
    memcpy(&ff_ijklongurl_protocol, protocol, protocol_size);                                            \
    return 0;                                                                                       \
}
static const AVClass ijk_ijklongurl_context_class = {                    \
    .class_name = "ijklongurl",                                               \
    .item_name  = av_default_item_name,                             \
    .version    = LIBAVUTIL_VERSION_INT,                            \
    };                                                              \
                                                                    \
URLProtocol ff_ijklongurl_protocol = {                                   \
    .name                = "ijklongurl",                                      \
    .url_open2           = ijkdummy_open,                           \
    .priv_data_size      = 1,                                       \
    .priv_data_class     = &ijk_ijklongurl_context_class,                \
};


IJK_DUMMY_PROTOCOL(ijksegment);

extern URLProtocol ff_ijksegment_protocol;                                                               \
int ijkav_register_ijksegment_protocol(URLProtocol *protocol, int protocol_size);                        \
int ijkav_register_ijksegment_protocol(URLProtocol *protocol, int protocol_size)                         \
{                                                                                                   \
    if (protocol_size != sizeof(URLProtocol)) {                                                     \
        av_log(NULL, AV_LOG_ERROR, "ijkav_register_ijksegment_protocol: ABI mismatch.\n");               \
        return -1;                                                                                  \
    }                                                                                               \
    memcpy(&ff_ijksegment_protocol, protocol, protocol_size);                                            \
    return 0;                                                                                       \
}
static const AVClass ijk_ijksegment_context_class = {                    \
    .class_name = "ijksegment",                                               \
    .item_name  = av_default_item_name,                             \
    .version    = LIBAVUTIL_VERSION_INT,                            \
    };                                                              \
                                                                    \
URLProtocol ff_ijksegment_protocol = {                                   \
    .name                = "ijksegment",                                      \
    .url_open2           = ijkdummy_open,                           \
    .priv_data_size      = 1,                                       \
    .priv_data_class     = &ijk_ijksegment_context_class,                \
};


IJK_DUMMY_PROTOCOL(ijktcphook);

extern URLProtocol ff_ijktcphook_protocol;                                                               \
int ijkav_register_ijktcphook_protocol(URLProtocol *protocol, int protocol_size);                        \
int ijkav_register_ijktcphook_protocol(URLProtocol *protocol, int protocol_size)                         \
{                                                                                                   \
    if (protocol_size != sizeof(URLProtocol)) {                                                     \
        av_log(NULL, AV_LOG_ERROR, "ijkav_register_ijktcphook_protocol: ABI mismatch.\n");               \
        return -1;                                                                                  \
    }                                                                                               \
    memcpy(&ff_ijktcphook_protocol, protocol, protocol_size);                                            \
    return 0;                                                                                       \
}
static const AVClass ijk_ijktcphook_context_class = {                    \
    .class_name = "ijktcphook",                                               \
    .item_name  = av_default_item_name,                             \
    .version    = LIBAVUTIL_VERSION_INT,                            \
    };                                                              \
                                                                    \
URLProtocol ff_ijktcphook_protocol = {                                   \
    .name                = "ijktcphook",                                      \
    .url_open2           = ijkdummy_open,                           \
    .priv_data_size      = 1,                                       \
    .priv_data_class     = &ijk_ijktcphook_context_class,                \
};

IJK_DUMMY_PROTOCOL(ijkio);

extern URLProtocol ff_ijkio_protocol;                                                               \
int ijkav_register_ijkio_protocol(URLProtocol *protocol, int protocol_size);                        \
int ijkav_register_ijkio_protocol(URLProtocol *protocol, int protocol_size)                         \
{                                                                                                   \
    if (protocol_size != sizeof(URLProtocol)) {                                                     \
        av_log(NULL, AV_LOG_ERROR, "ijkav_register_ijkio_protocol: ABI mismatch.\n");               \
        return -1;                                                                                  \
    }                                                                                               \
    memcpy(&ff_ijkio_protocol, protocol, protocol_size);                                            \
    return 0;                                                                                       \
}
static const AVClass ijk_ijkio_context_class = {                    \
    .class_name = "ijkio",                                               \
    .item_name  = av_default_item_name,                             \
    .version    = LIBAVUTIL_VERSION_INT,                            \
    };                                                              \
                                                                    \
URLProtocol ff_ijkio_protocol = {                                   \
    .name                = "ijkio",                                      \
    .url_open2           = ijkdummy_open,                           \
    .priv_data_size      = 1,                                       \
    .priv_data_class     = &ijk_ijkio_context_class,                \
};


Protocols.c
Protocol_list.c


//log
2019-04-02 15:52:07.534 2446-6389/tv.danmaku.ijk.media.example E/IJKMEDIA: func_display_overlay_l: overlay->format = YV12
2019-04-02 15:52:07.534 2446-6389/tv.danmaku.ijk.media.example E/IJKMEDIA: func_display_overlay_l: vout->overlay_format = _ES2


2019-03-20 10:41:04.038 17674-17674/tv.danmaku.ijk.media.example I/IJKMEDIA: ===== versions =====
2019-03-20 10:41:04.038 17674-17674/tv.danmaku.ijk.media.example I/IJKMEDIA: ijkplayer    : k0.8.8
2019-03-20 10:41:04.038 17674-18107/tv.danmaku.ijk.media.example I/IJKMEDIA: SDL_RunThread: [18107] ff_msg_loop
2019-03-20 10:41:04.038 17674-17674/tv.danmaku.ijk.media.example I/IJKMEDIA: FFmpeg       : ff3.4--ijk0.8.7--20180103--001
2019-03-20 10:41:04.038 17674-18107/tv.danmaku.ijk.media.example D/IJKMEDIA: message_loop
2019-03-20 10:41:04.038 17674-17674/tv.danmaku.ijk.media.example I/IJKMEDIA: libavutil    : 55.78.100
2019-03-20 10:41:04.038 17674-17674/tv.danmaku.ijk.media.example I/IJKMEDIA: libavcodec   : 57.107.100
2019-03-20 10:41:04.038 17674-17674/tv.danmaku.ijk.media.example I/IJKMEDIA: libavformat  : 57.83.100
2019-03-20 10:41:04.038 17674-17674/tv.danmaku.ijk.media.example I/IJKMEDIA: libswscale   : 4.8.100
2019-03-20 10:41:04.038 17674-17674/tv.danmaku.ijk.media.example I/IJKMEDIA: libswresample: 2.9.100
2019-03-20 10:41:04.038 17674-17674/tv.danmaku.ijk.media.example I/IJKMEDIA: ===== options =====
2019-03-20 10:41:04.038 17674-17674/tv.danmaku.ijk.media.example I/IJKMEDIA: player-opts : mediacodec                   = 0
2019-03-20 10:41:04.038 17674-17674/tv.danmaku.ijk.media.example I/IJKMEDIA: player-opts : opensles                     = 0
2019-03-20 10:41:04.038 17674-17674/tv.danmaku.ijk.media.example I/IJKMEDIA: player-opts : overlay-format               = 842225234
2019-03-20 10:41:04.038 17674-17674/tv.danmaku.ijk.media.example I/IJKMEDIA: player-opts : framedrop                    = 1
2019-03-20 10:41:04.038 17674-17674/tv.danmaku.ijk.media.example I/IJKMEDIA: player-opts : start-on-prepared            = 0
2019-03-20 10:41:04.038 17674-17674/tv.danmaku.ijk.media.example I/IJKMEDIA: format-opts : ijkapplication               = -599194304
2019-03-20 10:41:04.038 17674-17674/tv.danmaku.ijk.media.example I/IJKMEDIA: format-opts : ijkiomanager                 = -594024704
2019-03-20 10:41:04.038 17674-17674/tv.danmaku.ijk.media.example I/IJKMEDIA: format-opts : http-detect-range-support    = 0
2019-03-20 10:41:04.038 17674-17674/tv.danmaku.ijk.media.example I/IJKMEDIA: codec-opts  : skip_loop_filter             = 48
2019-03-20 10:41:04.038 17674-17674/tv.danmaku.ijk.media.example I/IJKMEDIA: ===================


