I have recently been studying ffmpeg, along with documentation and code related to media players.
After a rough read through the ffplay source in ffmpeg-2.0.1, these are my notes on its audio/video synchronization, as I currently understand it.
In ffplay 2.0.1 the video refresh is no longer driven directly by an SDL timer. Instead, the main loop event_loop calls refresh_loop_wait_event to wait for events,
and inside refresh_loop_wait_event the sleep function av_usleep is used to refresh the video at regular intervals.
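For contrast, earlier ffplay versions scheduled each refresh with an SDL timer that posted a custom event back to the main loop. The sketch below is a rough reconstruction from those older sources (FF_REFRESH_EVENT, schedule_refresh and sdl_refresh_timer_cb belong to the older code, not to 2.0.1, and the details may differ); it is only meant to show what the av_usleep approach replaces.

/* Hedged sketch of the older, timer-driven refresh that 2.0.1 drops
 * (reconstructed from earlier ffplay versions; details may differ). */
static Uint32 sdl_refresh_timer_cb(Uint32 interval, void *opaque)
{
    SDL_Event event;
    event.type = FF_REFRESH_EVENT;   /* custom event handled in event_loop */
    event.user.data1 = opaque;
    SDL_PushEvent(&event);
    return 0;                        /* 0 = do not re-arm the timer */
}

/* schedule a video refresh in 'delay' ms */
static void schedule_refresh(VideoState *is, int delay)
{
    SDL_AddTimer(delay, sdl_refresh_timer_cb, is);
}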
Here is the 2.0.1 code that drives the video refresh:
static void refresh_loop_wait_event(VideoState *is, SDL_Event *event) {
    double remaining_time = 0.0;
    SDL_PumpEvents(); /* keep pumping the internal SDL event queue */
    while (!SDL_PeepEvents(event, 1, SDL_GETEVENT, SDL_ALLEVENTS)) { /* check the event queue for messages */
        if (!cursor_hidden && av_gettime() - cursor_last_shown > CURSOR_HIDE_DELAY) {
            SDL_ShowCursor(0);
            cursor_hidden = 1;
        }
        if (remaining_time > 0.0)
            av_usleep((int64_t)(remaining_time * 1000000.0)); /* sleep here; this replaces the timer used in earlier versions */
        remaining_time = REFRESH_RATE; /* 10 ms */
        if (is->show_mode != SHOW_MODE_NONE && (!is->paused || is->force_refresh))
            video_refresh(is, &remaining_time);
        SDL_PumpEvents();
    }
}
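refresh_loop_wait_event is driven from event_loop, which keeps calling it and then dispatches whatever SDL event comes back. Below is a minimal sketch of that caller; the event handling is abbreviated and reconstructed from memory (the real switch in ffplay handles keyboard seeking, mouse events and more), so treat it as an illustration rather than the exact 2.0.1 code.

static void event_loop(VideoState *cur_stream)
{
    SDL_Event event;

    for (;;) {
        /* blocks until an SDL event arrives, refreshing the video while waiting */
        refresh_loop_wait_event(cur_stream, &event);
        switch (event.type) {
        case SDL_KEYDOWN:
            /* ... keyboard handling (pause, seek, quit, ...) ... */
            break;
        case SDL_QUIT:
            do_exit(cur_stream);
            break;
        default:
            break;
        }
    }
}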
Next, let's look at what the video_refresh function does.
The code is as follows:
/* called to display each frame */
static void video_refresh(void *opaque, double *remaining_time)
{
    VideoState *is = opaque;
    VideoPicture *vp;
    double time;

    SubPicture *sp, *sp2;

    if (!is->paused && get_master_sync_type(is) == AV_SYNC_EXTERNAL_CLOCK && is->realtime) /* if syncing to the external clock */
        check_external_clock_speed(is);

    if (!display_disable && is->show_mode != SHOW_MODE_VIDEO && is->audio_st) {
        time = av_gettime() / 1000000.0;
        if (is->force_refresh || is->last_vis_time + rdftspeed < time) { /* forced refresh, or the refresh interval has elapsed */
            video_display(is);
            is->last_vis_time = time; /* remember when this refresh happened */
        }
        *remaining_time = FFMIN(*remaining_time, is->last_vis_time + rdftspeed - time);
    }

    if (is->video_st) {
        int redisplay = 0;
        if (is->force_refresh)
            redisplay = pictq_prev_picture(is);
retry:
        if (is->pictq_size == 0) { /* the picture queue is empty */
            SDL_LockMutex(is->pictq_mutex);
            if (is->frame_last_dropped_pts != AV_NOPTS_VALUE && is->frame_last_dropped_pts > is->frame_last_pts) {
                update_video_pts(is, is->frame_last_dropped_pts, is->frame_last_dropped_pos, is->frame_last_dropped_serial);
                is->frame_last_dropped_pts = AV_NOPTS_VALUE;
            }
            SDL_UnlockMutex(is->pictq_mutex);
            // nothing to do, no picture to display in the queue
        } else {
            double last_duration, duration, delay;
            /* dequeue the picture */
            vp = &is->pictq[is->pictq_rindex];

            if (vp->serial != is->videoq.serial) {
                pictq_next_picture(is);
                redisplay = 0;
                goto retry;
            }

            if (is->paused)
                goto display;

            /* compute nominal last_duration: the difference between the pts of the
               frame about to be shown and the previous frame's pts gives the nominal
               display duration of a frame, which is used to predict when the next
               frame is due */
            last_duration = vp->pts - is->frame_last_pts; /* nominal display duration of the previous frame */
            if (!isnan(last_duration) && last_duration > 0 && last_duration < is->max_frame_duration) { /* sanity-check the duration */
                /* if duration of the last frame was sane, update last_duration in video state */
                is->frame_last_duration = last_duration; /* remember the per-frame display duration */
            }
            if (redisplay)
                delay = 0.0;
            else
                delay = compute_target_delay(is->frame_last_duration, is); /* adjust the nominal duration against the master clock to predict when the next frame is due */

            time = av_gettime() / 1000000.0;
            if (time < is->frame_timer + delay && !redisplay) { /* too early: ask the caller to sleep for the remaining time */
                *remaining_time = FFMIN(is->frame_timer + delay - time, *remaining_time);
                return;
            }

            is->frame_timer += delay;
            if (delay > 0 && time - is->frame_timer > AV_SYNC_THRESHOLD_MAX)
                is->frame_timer = time;

            SDL_LockMutex(is->pictq_mutex);
            if (!redisplay && !isnan(vp->pts))
                update_video_pts(is, vp->pts, vp->pos, vp->serial); /* update the current frame's pts and pos */
            SDL_UnlockMutex(is->pictq_mutex);

            if (is->pictq_size > 1) { /* the next frame is already in the queue */
                VideoPicture *nextvp = &is->pictq[(is->pictq_rindex + 1) % VIDEO_PICTURE_QUEUE_SIZE];
                duration = nextvp->pts - vp->pts; /* with the next frame available, its pts minus the current pts gives the real display duration */
                if (!is->step && (redisplay || framedrop > 0 || (framedrop && get_master_sync_type(is) != AV_SYNC_VIDEO_MASTER)) && time > is->frame_timer + duration) { /* if we are more than one frame late, drop the current frame */
                    if (!redisplay)
                        is->frame_drops_late++;
                    pictq_next_picture(is); /* frame-drop policy: discard the late frame and move on to the next one */
                    redisplay = 0;
                    goto retry;
                }
            }

            if (is->subtitle_st) {
                while (is->subpq_size > 0) {
                    sp = &is->subpq[is->subpq_rindex];

                    if (is->subpq_size > 1)
                        sp2 = &is->subpq[(is->subpq_rindex + 1) % SUBPICTURE_QUEUE_SIZE];
                    else
                        sp2 = NULL;

                    if (sp->serial != is->subtitleq.serial
                            || (is->vidclk.pts > (sp->pts + ((float) sp->sub.end_display_time / 1000)))
                            || (sp2 && is->vidclk.pts > (sp2->pts + ((float) sp2->sub.start_display_time / 1000))))
                    {
                        free_subpicture(sp);

                        /* update queue size and signal for next picture */
                        if (++is->subpq_rindex == SUBPICTURE_QUEUE_SIZE)
                            is->subpq_rindex = 0;

                        SDL_LockMutex(is->subpq_mutex);
                        is->subpq_size--;
                        SDL_CondSignal(is->subpq_cond);
                        SDL_UnlockMutex(is->subpq_mutex);
                    } else {
                        break;
                    }
                }
            }

display:
            /* display picture */
            if (!display_disable && is->show_mode == SHOW_MODE_VIDEO)
                video_display(is);

            pictq_next_picture(is);

            if (is->step && !is->paused)
                stream_toggle_pause(is);
        }
    }
    is->force_refresh = 0;
    if (show_status) { /* print the status line */
        static int64_t last_time;
        int64_t cur_time;
        int aqsize, vqsize, sqsize;
        double av_diff;

        cur_time = av_gettime();
        if (!last_time || (cur_time - last_time) >= 30000) {
            aqsize = 0;
            vqsize = 0;
            sqsize = 0;
            if (is->audio_st)
                aqsize = is->audioq.size;
            if (is->video_st)
                vqsize = is->videoq.size;
            if (is->subtitle_st)
                sqsize = is->subtitleq.size;
            av_diff = 0;
            if (is->audio_st && is->video_st)
                av_diff = get_clock(&is->audclk) - get_clock(&is->vidclk);
            else if (is->video_st)
                av_diff = get_master_clock(is) - get_clock(&is->vidclk);
            else if (is->audio_st)
                av_diff = get_master_clock(is) - get_clock(&is->audclk);
            av_log(NULL, AV_LOG_INFO,
                   "%7.2f %s:%7.3f fd=%4d aq=%5dKB vq=%5dKB sq=%5dB f=%"PRId64"/%"PRId64" \r",
                   get_master_clock(is),
                   (is->audio_st && is->video_st) ? "A-V" : (is->video_st ? "M-V" : (is->audio_st ? "M-A" : " ")),
                   av_diff,
                   is->frame_drops_early + is->frame_drops_late,
                   aqsize / 1024,
                   vqsize / 1024,
                   sqsize,
                   is->video_st ? is->video_st->codec->pts_correction_num_faulty_dts : 0,
                   is->video_st ? is->video_st->codec->pts_correction_num_faulty_pts : 0);
            fflush(stdout);
            last_time = cur_time;
        }
    }
}
First, note that by default ffplay uses the audio clock as the master clock source.
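To see where that default comes from, here is a rough paraphrase of get_master_sync_type, which the code above calls repeatedly. It is quoted from memory, so the exact 2.0.1 code may differ slightly; the point is that the requested av_sync_type (audio by default) is honoured only if the corresponding stream exists, otherwise ffplay falls back to another clock.

/* Rough paraphrase of ffplay's master-clock selection (the actual 2.0.1 code
 * may differ in detail): the requested sync type is used when its stream
 * exists, and the external clock is the last resort. */
static int get_master_sync_type(VideoState *is) {
    if (is->av_sync_type == AV_SYNC_VIDEO_MASTER) {
        if (is->video_st)
            return AV_SYNC_VIDEO_MASTER;
        else
            return AV_SYNC_AUDIO_MASTER;
    } else if (is->av_sync_type == AV_SYNC_AUDIO_MASTER) {
        if (is->audio_st)
            return AV_SYNC_AUDIO_MASTER;
        else
            return AV_SYNC_EXTERNAL_CLOCK;
    } else {
        return AV_SYNC_EXTERNAL_CLOCK;
    }
}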
The core of the strategy is the delay computation in the code above:
it takes the difference between the pts of the frame about to be shown, vp->pts, and the pts of the previous frame, which gives the (nominal) display duration of the previous frame.
That nominal duration is then used to estimate how long the current frame should stay on screen, and thus to predict the pts of (and the time to switch to) the next frame.
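To make the timing loop concrete, here is a hypothetical walk-through with made-up numbers (they are not taken from any real stream; they only illustrate the frame_timer / remaining_time mechanism shown above):

/* Hypothetical walk-through of the scheduling above (all numbers made up):
 *
 *   is->frame_timer = 10.000 s   (when the previous frame was put on screen)
 *   last_duration   =  0.040 s   (vp->pts - frame_last_pts, a 25 fps stream)
 *   delay           =  0.040 s   (compute_target_delay left it unchanged)
 *
 *   1st call: time = 10.025 s -> time < frame_timer + delay (10.040),
 *             so *remaining_time = 0.015 s and video_refresh returns;
 *             refresh_loop_wait_event then sleeps ~15 ms via av_usleep.
 *   2nd call: time = 10.041 s -> the wait is over:
 *             frame_timer += delay (now 10.040), the pts is updated and
 *             the frame is put on screen by video_display.
 */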
To turn that nominal duration into an actual delay, and thus predict when the next frame should appear (the next refresh time), compute_target_delay is called.
The code of compute_target_delay is as follows:
static double compute_target_delay(double delay, VideoState *is)
{
    double sync_threshold, diff;

    /* update delay to follow master synchronisation source */
    if (get_master_sync_type(is) != AV_SYNC_VIDEO_MASTER) {
        /* if video is slave, we try to correct big delays by
           duplicating or deleting a frame */
        diff = get_clock(&is->vidclk) - get_master_clock(is);

        /* skip or repeat frame. We take into account the
           delay to compute the threshold. I still don't know
           if it is the best guess */
        sync_threshold = FFMAX(AV_SYNC_THRESHOLD_MIN, FFMIN(AV_SYNC_THRESHOLD_MAX, delay));
        if (!isnan(diff) && fabs(diff) < is->max_frame_duration) {
            if (diff <= -sync_threshold) {
                /* the video frame is behind the master clock: shorten the delay */
                delay = FFMAX(0, delay + diff);
            } else if (diff >= sync_threshold && delay > AV_SYNC_FRAMEDUP_THRESHOLD) {
                /* the video frame is ahead, but its duration exceeds the
                   frame-duplication threshold: normally we would repeat the
                   frame or double the delay (i.e. slow playback down), but a
                   frame this long should not be duplicated to compensate for
                   A-V sync, so just add the difference to the delay */
                delay = delay + diff;
            } else if (diff >= sync_threshold) {
                /* the video frame is ahead of the master clock: double the delay */
                delay = 2 * delay;
            }
        }
    }

    av_dlog(NULL, "video: delay=%0.3f A-V=%f\n",
            delay, -diff);

    return delay;
}
In this function, the difference diff between the video clock and the master (reference) clock is obtained, and the delay is then adjusted according to where diff falls relative to the sync threshold.
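As a hypothetical illustration of the three branches (the numbers below are made up and are not the actual threshold macro values), assume the nominal delay is 0.040 s and sync_threshold also works out to 0.040 s:

/* delay = 0.040 s, sync_threshold = 0.040 s (hypothetical values)
 *
 *   diff = -0.100 s (video behind the master clock)
 *       -> delay = FFMAX(0, 0.040 - 0.100) = 0       show the frame at once
 *   diff = +0.010 s (within the threshold)
 *       -> delay stays 0.040 s                       no correction needed
 *   diff = +0.100 s (video ahead of the master clock)
 *       -> delay = 2 * 0.040 = 0.080 s               hold the frame longer
 *
 * The middle branch (delay > AV_SYNC_FRAMEDUP_THRESHOLD) instead adds diff to
 * the delay: e.g. a 0.5 s slideshow-style frame with diff = +0.150 s gives
 * delay = 0.65 s rather than the 1.0 s that doubling would produce.
 */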
Original article: http://www.cnblogs.com/lihaiping/p/4034097.html