Note: We no longer publish the latest version of our code here. We primarily use the kumc-bmi GitHub organization. The HERON ETL repository, in particular, is not public. Peers in the informatics community should see MultiSiteDev for details on requesting access.

source: webrtc/webrtc/modules/video_coding/main/source/jitter_buffer.cc @ 0:4bda6873e34c

Last change on this file: 0:4bda6873e34c, checked in by Michael Prittie <mprittie@…>, 6 years ago
Commit message: Scrubbed password for publication.

1/*
2 *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
3 *
4 *  Use of this source code is governed by a BSD-style license
5 *  that can be found in the LICENSE file in the root of the source
6 *  tree. An additional intellectual property rights grant can be found
7 *  in the file PATENTS.  All contributing project authors may
8 *  be found in the AUTHORS file in the root of the source tree.
9 */
10#include "webrtc/modules/video_coding/main/source/jitter_buffer.h"
11
12#include <assert.h>
13
14#include <algorithm>
15#include <utility>
16
17#include "webrtc/modules/video_coding/main/interface/video_coding.h"
18#include "webrtc/modules/video_coding/main/source/frame_buffer.h"
19#include "webrtc/modules/video_coding/main/source/inter_frame_delay.h"
20#include "webrtc/modules/video_coding/main/source/internal_defines.h"
21#include "webrtc/modules/video_coding/main/source/jitter_buffer_common.h"
22#include "webrtc/modules/video_coding/main/source/jitter_estimator.h"
23#include "webrtc/modules/video_coding/main/source/packet.h"
24#include "webrtc/system_wrappers/interface/clock.h"
25#include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
26#include "webrtc/system_wrappers/interface/event_wrapper.h"
27#include "webrtc/system_wrappers/interface/logging.h"
28#include "webrtc/system_wrappers/interface/trace.h"
29#include "webrtc/system_wrappers/interface/trace_event.h"
30
31namespace webrtc {
32
33// Use this RTT (in milliseconds) if no value has been reported.
34static const uint32_t kDefaultRtt = 200;
35
36typedef std::pair<uint32_t, VCMFrameBuffer*> FrameListPair;
37
38bool IsKeyFrame(FrameListPair pair) {
39  return pair.second->FrameType() == kVideoFrameKey;
40}
41
42bool HasNonEmptyState(FrameListPair pair) {
43  return pair.second->GetState() != kStateEmpty;
44}
45
46void FrameList::InsertFrame(VCMFrameBuffer* frame) {
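  // Hint the insertion at end() (rbegin().base() == end()): frames normally
  // arrive in increasing timestamp order, so the hinted insert into the
  // underlying map is amortized O(1) in the common in-order case.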
47  insert(rbegin().base(), FrameListPair(frame->TimeStamp(), frame));
48}
49
50VCMFrameBuffer* FrameList::FindFrame(uint32_t timestamp) const {
51  FrameList::const_iterator it = find(timestamp);
52  if (it == end())
53    return NULL;
54  return it->second;
55}
56
57VCMFrameBuffer* FrameList::PopFrame(uint32_t timestamp) {
58  FrameList::iterator it = find(timestamp);
59  if (it == end())
60    return NULL;
61  VCMFrameBuffer* frame = it->second;
62  erase(it);
63  return frame;
64}
65
66VCMFrameBuffer* FrameList::Front() const {
67  return begin()->second;
68}
69
70VCMFrameBuffer* FrameList::Back() const {
71  return rbegin()->second;
72}
73
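// Drops frames from the front of the list until a key frame is found (or the
// list is empty). Dropped frames are reset and pushed to |free_frames|; the
// return value is the number of dropped frames, and |key_frame_it| is set to
// the key frame that stopped the recycling, or end() if none was found.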
74int FrameList::RecycleFramesUntilKeyFrame(FrameList::iterator* key_frame_it,
75                                          UnorderedFrameList* free_frames) {
76  int drop_count = 0;
77  FrameList::iterator it = begin();
78  while (!empty()) {
79    // Throw away at least one frame.
80    WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCoding, -1,
81                 "Recycling: type=%s, low seqnum=%u",
82                 it->second->FrameType() == kVideoFrameKey ?
83                 "key" : "delta", it->second->GetLowSeqNum());
84    it->second->Reset();
85    free_frames->push_back(it->second);
86    erase(it++);
87    ++drop_count;
88    if (it != end() && it->second->FrameType() == kVideoFrameKey) {
89      *key_frame_it = it;
90      return drop_count;
91    }
92  }
93  *key_frame_it = end();
94  return drop_count;
95}
96
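// Drops frames from the front of the list that are either old relative to the
// last decoded state or empty (an empty frame is dropped only after it has
// successfully updated |decoding_state|). Dropped frames are pushed to
// |free_frames|; the return value is the number of dropped frames.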
97int FrameList::CleanUpOldOrEmptyFrames(VCMDecodingState* decoding_state,
98                                       UnorderedFrameList* free_frames) {
99  int drop_count = 0;
100  while (!empty()) {
101    VCMFrameBuffer* oldest_frame = Front();
102    bool remove_frame = false;
103    if (oldest_frame->GetState() == kStateEmpty && size() > 1) {
104      // This frame is empty, try to update the last decoded state and drop it
105      // if successful.
106      remove_frame = decoding_state->UpdateEmptyFrame(oldest_frame);
107    } else {
108      remove_frame = decoding_state->IsOldFrame(oldest_frame);
109    }
110    if (!remove_frame) {
111      break;
112    }
113    free_frames->push_back(oldest_frame);
114    ++drop_count;
115    TRACE_EVENT_INSTANT1("webrtc", "JB::OldOrEmptyFrameDropped", "timestamp",
116                         oldest_frame->TimeStamp());
117    erase(begin());
118  }
119  return drop_count;
120}
121
122void FrameList::Reset(UnorderedFrameList* free_frames) {
123  while (!empty()) {
124    begin()->second->Reset();
125    free_frames->push_back(begin()->second);
126    erase(begin());
127  }
128}
129
130VCMJitterBuffer::VCMJitterBuffer(Clock* clock,
131                                 EventFactory* event_factory,
132                                 int vcm_id,
133                                 int receiver_id,
134                                 bool master)
135    : vcm_id_(vcm_id),
136      receiver_id_(receiver_id),
137      clock_(clock),
138      running_(false),
139      crit_sect_(CriticalSectionWrapper::CreateCriticalSection()),
140      master_(master),
141      frame_event_(event_factory->CreateEvent()),
142      packet_event_(event_factory->CreateEvent()),
143      max_number_of_frames_(kStartNumberOfFrames),
144      frame_buffers_(),
145      free_frames_(),
146      decodable_frames_(),
147      incomplete_frames_(),
148      last_decoded_state_(),
149      first_packet_since_reset_(true),
150      incoming_frame_rate_(0),
151      incoming_frame_count_(0),
152      time_last_incoming_frame_count_(0),
153      incoming_bit_count_(0),
154      incoming_bit_rate_(0),
155      drop_count_(0),
156      num_consecutive_old_frames_(0),
157      num_consecutive_old_packets_(0),
158      num_discarded_packets_(0),
159      jitter_estimate_(vcm_id, receiver_id),
160      inter_frame_delay_(clock_->TimeInMilliseconds()),
161      rtt_ms_(kDefaultRtt),
162      nack_mode_(kNoNack),
163      low_rtt_nack_threshold_ms_(-1),
164      high_rtt_nack_threshold_ms_(-1),
165      missing_sequence_numbers_(SequenceNumberLessThan()),
166      nack_seq_nums_(),
167      max_nack_list_size_(0),
168      max_packet_age_to_nack_(0),
169      max_incomplete_time_ms_(0),
170      decode_error_mode_(kNoErrors),
171      average_packets_per_frame_(0.0f),
172      frame_counter_(0) {
173  memset(frame_buffers_, 0, sizeof(frame_buffers_));
174
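  // Allocate the initial pool of frame buffers; more are allocated on demand
  // by TryToIncreaseJitterBufferSize(), up to kMaxNumberOfFrames.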
175  for (int i = 0; i < kStartNumberOfFrames; i++) {
176    frame_buffers_[i] = new VCMFrameBuffer();
177    free_frames_.push_back(frame_buffers_[i]);
178  }
179}
180
181VCMJitterBuffer::~VCMJitterBuffer() {
182  Stop();
183  for (int i = 0; i < kMaxNumberOfFrames; i++) {
184    if (frame_buffers_[i]) {
185      delete frame_buffers_[i];
186    }
187  }
188  delete crit_sect_;
189}
190
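// Deep-copies the state of |rhs| into |this|: counters and estimators are
// copied, while the frame lists are rebuilt with newly allocated frame
// buffers. Note that the copy gets the opposite |master_| flag
// (master_ = !rhs.master_).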
191void VCMJitterBuffer::CopyFrom(const VCMJitterBuffer& rhs) {
192  if (this != &rhs) {
193    crit_sect_->Enter();
194    rhs.crit_sect_->Enter();
195    vcm_id_ = rhs.vcm_id_;
196    receiver_id_ = rhs.receiver_id_;
197    running_ = rhs.running_;
198    master_ = !rhs.master_;
199    max_number_of_frames_ = rhs.max_number_of_frames_;
200    incoming_frame_rate_ = rhs.incoming_frame_rate_;
201    incoming_frame_count_ = rhs.incoming_frame_count_;
202    time_last_incoming_frame_count_ = rhs.time_last_incoming_frame_count_;
203    incoming_bit_count_ = rhs.incoming_bit_count_;
204    incoming_bit_rate_ = rhs.incoming_bit_rate_;
205    drop_count_ = rhs.drop_count_;
206    num_consecutive_old_frames_ = rhs.num_consecutive_old_frames_;
207    num_consecutive_old_packets_ = rhs.num_consecutive_old_packets_;
208    num_discarded_packets_ = rhs.num_discarded_packets_;
209    jitter_estimate_ = rhs.jitter_estimate_;
210    inter_frame_delay_ = rhs.inter_frame_delay_;
211    waiting_for_completion_ = rhs.waiting_for_completion_;
212    rtt_ms_ = rhs.rtt_ms_;
213    first_packet_since_reset_ = rhs.first_packet_since_reset_;
214    last_decoded_state_ = rhs.last_decoded_state_;
215    decode_error_mode_ = rhs.decode_error_mode_;
216    assert(max_nack_list_size_ == rhs.max_nack_list_size_);
217    assert(max_packet_age_to_nack_ == rhs.max_packet_age_to_nack_);
218    assert(max_incomplete_time_ms_ == rhs.max_incomplete_time_ms_);
219    receive_statistics_ = rhs.receive_statistics_;
220    nack_seq_nums_.resize(rhs.nack_seq_nums_.size());
221    missing_sequence_numbers_ = rhs.missing_sequence_numbers_;
222    latest_received_sequence_number_ = rhs.latest_received_sequence_number_;
223    average_packets_per_frame_ = rhs.average_packets_per_frame_;
224    for (int i = 0; i < kMaxNumberOfFrames; i++) {
225      if (frame_buffers_[i] != NULL) {
226        delete frame_buffers_[i];
227        frame_buffers_[i] = NULL;
228      }
229    }
230    free_frames_.clear();
231    decodable_frames_.clear();
232    incomplete_frames_.clear();
233    int i = 0;
234    for (UnorderedFrameList::const_iterator it = rhs.free_frames_.begin();
235         it != rhs.free_frames_.end(); ++it, ++i) {
236      frame_buffers_[i] = new VCMFrameBuffer;
237      free_frames_.push_back(frame_buffers_[i]);
238    }
239    CopyFrames(&decodable_frames_, rhs.decodable_frames_, &i);
240    CopyFrames(&incomplete_frames_, rhs.incomplete_frames_, &i);
241    rhs.crit_sect_->Leave();
242    crit_sect_->Leave();
243  }
244}
245
246void VCMJitterBuffer::CopyFrames(FrameList* to_list,
247    const FrameList& from_list, int* index) {
248  to_list->clear();
249  for (FrameList::const_iterator it = from_list.begin();
250       it != from_list.end(); ++it, ++*index) {
251    frame_buffers_[*index] = new VCMFrameBuffer(*it->second);
252    to_list->InsertFrame(frame_buffers_[*index]);
253  }
254}
255
256void VCMJitterBuffer::Start() {
257  CriticalSectionScoped cs(crit_sect_);
258  running_ = true;
259  incoming_frame_count_ = 0;
260  incoming_frame_rate_ = 0;
261  incoming_bit_count_ = 0;
262  incoming_bit_rate_ = 0;
263  time_last_incoming_frame_count_ = clock_->TimeInMilliseconds();
264  receive_statistics_.clear();
265
266  num_consecutive_old_frames_ = 0;
267  num_consecutive_old_packets_ = 0;
268  num_discarded_packets_ = 0;
269
270  // Start in a non-signaled state.
271  frame_event_->Reset();
272  packet_event_->Reset();
273  waiting_for_completion_.frame_size = 0;
274  waiting_for_completion_.timestamp = 0;
275  waiting_for_completion_.latest_packet_time = -1;
276  first_packet_since_reset_ = true;
277  rtt_ms_ = kDefaultRtt;
278  last_decoded_state_.Reset();
279
280  WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding,
281               VCMId(vcm_id_, receiver_id_), "JB(0x%x): Jitter buffer: start",
282               this);
283}
284
285void VCMJitterBuffer::Stop() {
286  crit_sect_->Enter();
287  running_ = false;
288  last_decoded_state_.Reset();
289  free_frames_.clear();
290  decodable_frames_.clear();
291  incomplete_frames_.clear();
292  // Make sure all frames are reset and free.
293  for (int i = 0; i < kMaxNumberOfFrames; i++) {
294    if (frame_buffers_[i] != NULL) {
295      static_cast<VCMFrameBuffer*>(frame_buffers_[i])->Reset();
296      free_frames_.push_back(frame_buffers_[i]);
297    }
298  }
299  crit_sect_->Leave();
300  // Make sure we wake up any threads waiting on these events.
301  frame_event_->Set();
302  packet_event_->Set();
303  WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding,
304               VCMId(vcm_id_, receiver_id_), "JB(0x%x): Jitter buffer: stop",
305               this);
306}
307
308bool VCMJitterBuffer::Running() const {
309  CriticalSectionScoped cs(crit_sect_);
310  return running_;
311}
312
313void VCMJitterBuffer::Flush() {
314  CriticalSectionScoped cs(crit_sect_);
315  decodable_frames_.Reset(&free_frames_);
316  incomplete_frames_.Reset(&free_frames_);
317  last_decoded_state_.Reset();  // TODO(mikhal): sync reset.
318  frame_event_->Reset();
319  packet_event_->Reset();
320  num_consecutive_old_frames_ = 0;
321  num_consecutive_old_packets_ = 0;
322  // Also reset the jitter and delay estimates
323  jitter_estimate_.Reset();
324  inter_frame_delay_.Reset(clock_->TimeInMilliseconds());
325  waiting_for_completion_.frame_size = 0;
326  waiting_for_completion_.timestamp = 0;
327  waiting_for_completion_.latest_packet_time = -1;
328  first_packet_since_reset_ = true;
329  missing_sequence_numbers_.clear();
330  WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding,
331               VCMId(vcm_id_, receiver_id_), "JB(0x%x): Jitter buffer: flush",
332               this);
333}
334
335// Returns the counts of received key and delta frames.
336std::map<FrameType, uint32_t> VCMJitterBuffer::FrameStatistics() const {
337  CriticalSectionScoped cs(crit_sect_);
338  return receive_statistics_;
339}
340
341int VCMJitterBuffer::num_discarded_packets() const {
342  CriticalSectionScoped cs(crit_sect_);
343  return num_discarded_packets_;
344}
345
346// Calculate framerate and bitrate.
347void VCMJitterBuffer::IncomingRateStatistics(unsigned int* framerate,
348                                             unsigned int* bitrate) {
349  assert(framerate);
350  assert(bitrate);
351  CriticalSectionScoped cs(crit_sect_);
352  const int64_t now = clock_->TimeInMilliseconds();
353  int64_t diff = now - time_last_incoming_frame_count_;
354  if (diff < 1000 && incoming_frame_rate_ > 0 && incoming_bit_rate_ > 0) {
355    // Make sure we report something even though less than
356    // 1 second has passed since last update.
357    *framerate = incoming_frame_rate_;
358    *bitrate = incoming_bit_rate_;
359  } else if (incoming_frame_count_ != 0) {
360    // We have received frame(s) since last call to this function
361
362    // Prepare calculations
363    if (diff <= 0) {
364      diff = 1;
365    }
366    // we add 0.5f for rounding
367    float rate = 0.5f + ((incoming_frame_count_ * 1000.0f) / diff);
368    if (rate < 1.0f) {
369      rate = 1.0f;
370    }
371
372    // Calculate frame rate
373    // Let r be rate.
374    // r(0) = 1000*framecount/delta_time.
375    // (I.e. frames per second since last calculation.)
376    // frame_rate = r(0)/2 + r(-1)/2
377    // (I.e. the average frames/s of this and the previous calculation.)
378    *framerate = (incoming_frame_rate_ + static_cast<unsigned int>(rate)) / 2;
379    incoming_frame_rate_ = static_cast<unsigned int>(rate);
380
381    // Calculate bit rate
382    if (incoming_bit_count_ == 0) {
383      *bitrate = 0;
384    } else {
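      // 10 * (100 * bits) / ms == 1000 * bits / ms, i.e. bits per second;
      // integer division truncates, so the result is a multiple of 10 bps.
      // (The split multiplication presumably limits intermediate overflow.)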
385      *bitrate = 10 * ((100 * incoming_bit_count_) /
386                       static_cast<unsigned int>(diff));
387    }
388    incoming_bit_rate_ = *bitrate;
389
390    // Reset count
391    incoming_frame_count_ = 0;
392    incoming_bit_count_ = 0;
393    time_last_incoming_frame_count_ = now;
394
395  } else {
396    // No frames since last call
397    time_last_incoming_frame_count_ = clock_->TimeInMilliseconds();
398    *framerate = 0;
399    *bitrate = 0;
400    incoming_frame_rate_ = 0;
401    incoming_bit_rate_ = 0;
402  }
403}
404
405// Answers the question:
406// Will the packet sequence be complete if the next frame is grabbed for
407// decoding right now? That is, have we lost a frame between the last decoded
408// frame and the next, or is the next
409// frame missing one or more packets?
410bool VCMJitterBuffer::CompleteSequenceWithNextFrame() {
411  CriticalSectionScoped cs(crit_sect_);
412  // Find the oldest frame ready for the decoder; check sequence number and size.
413  CleanUpOldOrEmptyFrames();
414  if (!decodable_frames_.empty()) {
415    if (decodable_frames_.Front()->GetState() == kStateComplete) {
416      return true;
417    }
418  } else if (incomplete_frames_.size() <= 1) {
419    // Frame not ready to be decoded.
420    return true;
421  }
422  return false;
423}
424
425// Returns immediately if a complete frame is already available; otherwise
426// waits up to |max_wait_time_ms| ms (decided by the caller) for one to arrive.
427bool VCMJitterBuffer::NextCompleteTimestamp(
428    uint32_t max_wait_time_ms, uint32_t* timestamp) {
429  crit_sect_->Enter();
430  if (!running_) {
431    crit_sect_->Leave();
432    return false;
433  }
434  CleanUpOldOrEmptyFrames();
435
436  if (decodable_frames_.empty() ||
437      decodable_frames_.Front()->GetState() != kStateComplete) {
438    const int64_t end_wait_time_ms = clock_->TimeInMilliseconds() +
439        max_wait_time_ms;
440    int64_t wait_time_ms = max_wait_time_ms;
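    // Wait in slices: each time the frame event fires, re-check for a complete
    // decodable frame and, if there still is none, keep waiting with whatever
    // remains of the original time budget.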
441    while (wait_time_ms > 0) {
442      crit_sect_->Leave();
443      const EventTypeWrapper ret =
444        frame_event_->Wait(static_cast<uint32_t>(wait_time_ms));
445      crit_sect_->Enter();
446      if (ret == kEventSignaled) {
447        // Are we shutting down the jitter buffer?
448        if (!running_) {
449          crit_sect_->Leave();
450          return false;
451        }
452        // Find the oldest frame ready for the decoder.
453        CleanUpOldOrEmptyFrames();
454        if (decodable_frames_.empty() ||
455            decodable_frames_.Front()->GetState() != kStateComplete) {
456          wait_time_ms = end_wait_time_ms - clock_->TimeInMilliseconds();
457        } else {
458          break;
459        }
460      } else {
461        break;
462      }
463    }
464    // Inside |crit_sect_|.
465  } else {
466    // We already have a frame, reset the event.
467    frame_event_->Reset();
468  }
469  if (decodable_frames_.empty() ||
470      decodable_frames_.Front()->GetState() != kStateComplete) {
471    crit_sect_->Leave();
472    return false;
473  }
474  *timestamp = decodable_frames_.Front()->TimeStamp();
475  crit_sect_->Leave();
476  return true;
477}
478
479bool VCMJitterBuffer::NextMaybeIncompleteTimestamp(uint32_t* timestamp) {
480  CriticalSectionScoped cs(crit_sect_);
481  if (!running_) {
482    return false;
483  }
484  if (decode_error_mode_ == kNoErrors) {
485    // No point to continue, as we are not decoding with errors.
486    return false;
487  }
488
489  CleanUpOldOrEmptyFrames();
490
491  if (decodable_frames_.empty()) {
492    return false;
493  }
494  VCMFrameBuffer* oldest_frame = decodable_frames_.Front();
495  // If we have exactly one frame in the buffer, release it only if it is
496  // complete. We know decodable_frames_ is not empty due to the previous
497  // check.
498  if (decodable_frames_.size() == 1 && incomplete_frames_.empty()
499      && oldest_frame->GetState() != kStateComplete) {
500    return false;
501  }
502
503  *timestamp = oldest_frame->TimeStamp();
504  return true;
505}
506
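// Pops the frame with |timestamp| from the decodable list (or, failing that,
// from the incomplete list), updates the jitter estimate, the last decoded
// state and the NACK list, and returns the frame prepared for decoding.
// Returns NULL if no frame with that timestamp is found.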
507VCMEncodedFrame* VCMJitterBuffer::ExtractAndSetDecode(uint32_t timestamp) {
508  CriticalSectionScoped cs(crit_sect_);
509
510  if (!running_) {
511    return NULL;
512  }
513  // Extract the frame with the desired timestamp.
514  VCMFrameBuffer* frame = decodable_frames_.PopFrame(timestamp);
515  bool continuous = true;
516  if (!frame) {
517    frame = incomplete_frames_.PopFrame(timestamp);
518    if (frame)
519      continuous = last_decoded_state_.ContinuousFrame(frame);
520    else
521      return NULL;
522  }
523  TRACE_EVENT_ASYNC_STEP0("webrtc", "Video", timestamp, "Extract");
524  // Frame pulled out from jitter buffer, update the jitter estimate.
525  const bool retransmitted = (frame->GetNackCount() > 0);
526  if (retransmitted) {
527    jitter_estimate_.FrameNacked();
528  } else if (frame->Length() > 0) {
529    // Ignore retransmitted and empty frames.
530    if (waiting_for_completion_.latest_packet_time >= 0) {
531      UpdateJitterEstimate(waiting_for_completion_, true);
532    }
533    if (frame->GetState() == kStateComplete) {
534      UpdateJitterEstimate(*frame, false);
535    } else {
536      // Wait for this one to get complete.
537      waiting_for_completion_.frame_size = frame->Length();
538      waiting_for_completion_.latest_packet_time =
539          frame->LatestPacketTimeMs();
540      waiting_for_completion_.timestamp = frame->TimeStamp();
541    }
542  }
543
544  // The state must be changed to decoding before cleaning up zero sized
545  // frames to avoid empty frames being cleaned up and then given to the
546  // decoder. Propagates the missing_frame bit.
547  frame->PrepareForDecode(continuous);
548
549  // We have a frame - update the last decoded state and nack list.
550  last_decoded_state_.SetState(frame);
551  DropPacketsFromNackList(last_decoded_state_.sequence_num());
552
553  if (frame->IsSessionComplete())
554    UpdateAveragePacketsPerFrame(frame->NumPackets());
555
556  return frame;
557}
558
559// Release frame when done with decoding. Should never be used to release
560// frames from within the jitter buffer.
561void VCMJitterBuffer::ReleaseFrame(VCMEncodedFrame* frame) {
562  CriticalSectionScoped cs(crit_sect_);
563  VCMFrameBuffer* frame_buffer = static_cast<VCMFrameBuffer*>(frame);
564  if (frame_buffer) {
565    free_frames_.push_back(frame_buffer);
566  }
567}
568
569// Gets frame to use for this timestamp. If no match, get empty frame.
570VCMFrameBufferEnum VCMJitterBuffer::GetFrame(const VCMPacket& packet,
571                                             VCMFrameBuffer** frame) {
572  // Does this packet belong to an old frame?
573  if (last_decoded_state_.IsOldPacket(&packet)) {
574    // Account only for media packets.
575    if (packet.sizeBytes > 0) {
576      num_discarded_packets_++;
577      num_consecutive_old_packets_++;
578    }
579    // Update last decoded sequence number if the packet arrived late and
580    // belongs to a frame with a timestamp equal to the last decoded
581    // timestamp.
582    last_decoded_state_.UpdateOldPacket(&packet);
583    DropPacketsFromNackList(last_decoded_state_.sequence_num());
584
585    if (num_consecutive_old_packets_ > kMaxConsecutiveOldPackets) {
586      Flush();
587      return kFlushIndicator;
588    }
589    return kOldPacket;
590  }
591  num_consecutive_old_packets_ = 0;
592
593  *frame = incomplete_frames_.FindFrame(packet.timestamp);
594  if (*frame)
595    return kNoError;
596  *frame = decodable_frames_.FindFrame(packet.timestamp);
597  if (*frame)
598    return kNoError;
599
600  // No match, return empty frame.
601  *frame = GetEmptyFrame();
602  VCMFrameBufferEnum ret = kNoError;
603  if (!*frame) {
604    // No free frame! Try to reclaim some...
605    LOG_F(LS_INFO) << "Unable to get empty frame; Recycling.";
606    bool found_key_frame = RecycleFramesUntilKeyFrame();
607    *frame = GetEmptyFrame();
608    if (!*frame)
609      return kGeneralError;
610    else if (!found_key_frame)
611      ret = kFlushIndicator;
612  }
613  (*frame)->Reset();
614  return ret;
615}
616
617int64_t VCMJitterBuffer::LastPacketTime(const VCMEncodedFrame* frame,
618                                        bool* retransmitted) const {
619  assert(retransmitted);
620  CriticalSectionScoped cs(crit_sect_);
621  const VCMFrameBuffer* frame_buffer =
622      static_cast<const VCMFrameBuffer*>(frame);
623  *retransmitted = (frame_buffer->GetNackCount() > 0);
624  return frame_buffer->LatestPacketTimeMs();
625}
626
627VCMFrameBufferEnum VCMJitterBuffer::InsertPacket(const VCMPacket& packet,
628                                                 bool* retransmitted) {
629  CriticalSectionScoped cs(crit_sect_);
630
631  VCMFrameBuffer* frame = NULL;
632  const VCMFrameBufferEnum error = GetFrame(packet, &frame);
633  if (error != kNoError && frame == NULL) {
634    return error;
635  }
636
637  int64_t now_ms = clock_->TimeInMilliseconds();
638  // We are keeping track of the first and latest seq numbers, and
639  // the number of wraps to be able to calculate how many packets we expect.
640  if (first_packet_since_reset_) {
641    // Now it's time to start estimating jitter;
642    // reset the delay estimate.
643    inter_frame_delay_.Reset(now_ms);
644  }
645  if (last_decoded_state_.IsOldPacket(&packet)) {
646    // This packet belongs to an old, already decoded frame, we want to update
647    // the last decoded sequence number.
648    last_decoded_state_.UpdateOldPacket(&packet);
649    drop_count_++;
650    // Flush if this happens consistently.
651    num_consecutive_old_frames_++;
652    if (num_consecutive_old_frames_ > kMaxConsecutiveOldFrames) {
653      Flush();
654      return kFlushIndicator;
655    }
656    return kNoError;
657  }
658
659  num_consecutive_old_frames_ = 0;
660
661  // Empty packets may bias the jitter estimate (lacking size component),
662  // therefore don't let an empty packet trigger the following updates:
663  if (packet.frameType != kFrameEmpty) {
664    if (waiting_for_completion_.timestamp == packet.timestamp) {
665      // This can get bad if we have a lot of duplicate packets;
666      // we will then count some packets multiple times.
667      waiting_for_completion_.frame_size += packet.sizeBytes;
668      waiting_for_completion_.latest_packet_time = now_ms;
669    } else if (waiting_for_completion_.latest_packet_time >= 0 &&
670               waiting_for_completion_.latest_packet_time + 2000 <= now_ms) {
671      // A packet should never be more than two seconds late
672      UpdateJitterEstimate(waiting_for_completion_, true);
673      waiting_for_completion_.latest_packet_time = -1;
674      waiting_for_completion_.frame_size = 0;
675      waiting_for_completion_.timestamp = 0;
676    }
677  }
678
679  VCMFrameBufferStateEnum previous_state = frame->GetState();
680  // Insert packet.
681  // Check for first packet. High sequence number will be -1 if neither an empty
682  // packet nor a media packet has been inserted.
683  bool first = (frame->GetHighSeqNum() == -1);
684  FrameData frame_data;
685  frame_data.rtt_ms = rtt_ms_;
686  frame_data.rolling_average_packets_per_frame = average_packets_per_frame_;
687  VCMFrameBufferEnum buffer_return = frame->InsertPacket(packet,
688                                                         now_ms,
689                                                         decode_error_mode_,
690                                                         frame_data);
691  if (!frame->GetCountedFrame()) {
692    TRACE_EVENT_ASYNC_BEGIN1("webrtc", "Video", frame->TimeStamp(),
693                             "timestamp", frame->TimeStamp());
694  }
695
696  if (buffer_return > 0) {
697    incoming_bit_count_ += packet.sizeBytes << 3;
698    if (first_packet_since_reset_) {
699      latest_received_sequence_number_ = packet.seqNum;
700      first_packet_since_reset_ = false;
701    } else {
702      if (IsPacketRetransmitted(packet)) {
703        frame->IncrementNackCount();
704      }
705      if (!UpdateNackList(packet.seqNum)) {
706        LOG_F(LS_INFO) << "Requesting key frame due to flushed NACK list.";
707        buffer_return = kFlushIndicator;
708      }
709      latest_received_sequence_number_ = LatestSequenceNumber(
710          latest_received_sequence_number_, packet.seqNum);
711    }
712  }
713
714  // Is the frame already in the decodable list?
715  bool update_decodable_list = (previous_state != kStateDecodable &&
716      previous_state != kStateComplete);
717  bool continuous = IsContinuous(*frame);
718  switch (buffer_return) {
719    case kGeneralError:
720    case kTimeStampError:
721    case kSizeError: {
722      // This frame will be cleaned up later from the frame list.
723      frame->Reset();
724      break;
725    }
726    case kCompleteSession: {
727      if (update_decodable_list) {
728        if (master_) {
729          // Only trace the primary jitter buffer to make it possible to parse
730          // and plot the trace file.
731          WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding,
732                       VCMId(vcm_id_, receiver_id_),
733                       "JB(0x%x) FB(0x%x): Complete frame added to jitter "
734                       "buffer, size:%d type %d",
735                       this, frame, frame->Length(), frame->FrameType());
736        }
737        CountFrame(*frame);
738        frame->SetCountedFrame(true);
739        if (continuous) {
740          // Signal that we have a complete session.
741          frame_event_->Set();
742        }
743      }
744    }
745    // Note: There is no break here - continuing to kDecodableSession.
746    case kDecodableSession: {
747      *retransmitted = (frame->GetNackCount() > 0);
748      // Signal that we have a received packet.
749      packet_event_->Set();
750      if (!update_decodable_list) {
751        break;
752      }
753      if (continuous) {
754        if (!first) {
755          incomplete_frames_.PopFrame(packet.timestamp);
756        }
757        decodable_frames_.InsertFrame(frame);
758        FindAndInsertContinuousFrames(*frame);
759      } else if (first) {
760        incomplete_frames_.InsertFrame(frame);
761      }
762      break;
763    }
764    case kIncomplete: {
765      // No point in storing empty continuous frames.
766      if (frame->GetState() == kStateEmpty &&
767          last_decoded_state_.UpdateEmptyFrame(frame)) {
768        free_frames_.push_back(frame);
769        frame->Reset();
770        frame = NULL;
771        return kNoError;
772      } else if (first) {
773        incomplete_frames_.InsertFrame(frame);
774      }
775      // Signal that we have received a packet.
776      packet_event_->Set();
777      break;
778    }
779    case kNoError:
780    case kOutOfBoundsPacket:
781    case kDuplicatePacket: {
782      break;
783    }
784    case kFlushIndicator:
785      return kFlushIndicator;
786    default: {
787      assert(false && "JitterBuffer::InsertPacket: Undefined value");
788    }
789  }
790  return buffer_return;
791}
792
793bool VCMJitterBuffer::IsContinuousInState(const VCMFrameBuffer& frame,
794    const VCMDecodingState& decoding_state) const {
795  if (decode_error_mode_ == kWithErrors)
796    return true;
797  // Is this frame complete or decodable, and is it continuous?
798  // kStateDecodable will never be set when decode_error_mode_ is kNoErrors,
799  // as SessionInfo determines this state based on the error mode (and frame
800  // completeness).
801  if ((frame.GetState() == kStateComplete ||
802       frame.GetState() == kStateDecodable) &&
803       decoding_state.ContinuousFrame(&frame)) {
804    return true;
805  } else {
806    return false;
807  }
808}
809
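// A frame is continuous if it could be decoded right after the frames we
// already have: first test it against the last decoded state, then against the
// state we would reach by decoding, in timestamp order, the decodable frames
// older than it.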
810bool VCMJitterBuffer::IsContinuous(const VCMFrameBuffer& frame) const {
811  if (IsContinuousInState(frame, last_decoded_state_)) {
812    return true;
813  }
814  VCMDecodingState decoding_state;
815  decoding_state.CopyFrom(last_decoded_state_);
816  for (FrameList::const_iterator it = decodable_frames_.begin();
817       it != decodable_frames_.end(); ++it)  {
818    VCMFrameBuffer* decodable_frame = it->second;
819    if (IsNewerTimestamp(decodable_frame->TimeStamp(), frame.TimeStamp())) {
820      break;
821    }
822    decoding_state.SetState(decodable_frame);
823    if (IsContinuousInState(frame, decoding_state)) {
824      return true;
825    }
826  }
827  return false;
828}
829
830void VCMJitterBuffer::FindAndInsertContinuousFrames(
831    const VCMFrameBuffer& new_frame) {
832  VCMDecodingState decoding_state;
833  decoding_state.CopyFrom(last_decoded_state_);
834  decoding_state.SetState(&new_frame);
835  // When temporal layers are available, we search for a complete or decodable
836  // frame until we hit one of the following:
837  // 1. Continuous base or sync layer.
838  // 2. The end of the list was reached.
839  for (FrameList::iterator it = incomplete_frames_.begin();
840       it != incomplete_frames_.end();)  {
841    VCMFrameBuffer* frame = it->second;
842    if (IsNewerTimestamp(new_frame.TimeStamp(), frame->TimeStamp())) {
843      ++it;
844      continue;
845    }
846    if (IsContinuousInState(*frame, decoding_state)) {
847      decodable_frames_.InsertFrame(frame);
848      incomplete_frames_.erase(it++);
849      decoding_state.SetState(frame);
850    } else if (frame->TemporalId() <= 0) {
851      break;
852    } else {
853      ++it;
854    }
855  }
856}
857
858uint32_t VCMJitterBuffer::EstimatedJitterMs() {
859  CriticalSectionScoped cs(crit_sect_);
860  // Compute RTT multiplier for estimation.
861  // low_rtt_nack_threshold_ms_ == -1 means no FEC.
862  double rtt_mult = 1.0f;
863  if (low_rtt_nack_threshold_ms_ >= 0 &&
864      static_cast<int>(rtt_ms_) >= low_rtt_nack_threshold_ms_) {
865    // For RTTs above low_rtt_nack_threshold_ms_ we don't apply extra delay
866    // when waiting for retransmissions.
867    rtt_mult = 0.0f;
868  }
869  return jitter_estimate_.GetJitterEstimate(rtt_mult);
870}
871
872void VCMJitterBuffer::UpdateRtt(uint32_t rtt_ms) {
873  CriticalSectionScoped cs(crit_sect_);
874  rtt_ms_ = rtt_ms;
875  jitter_estimate_.UpdateRtt(rtt_ms);
876}
877
878void VCMJitterBuffer::SetNackMode(VCMNackMode mode,
879                                  int low_rtt_nack_threshold_ms,
880                                  int high_rtt_nack_threshold_ms) {
881  CriticalSectionScoped cs(crit_sect_);
882  nack_mode_ = mode;
883  if (mode == kNoNack) {
884    missing_sequence_numbers_.clear();
885  }
886  assert(low_rtt_nack_threshold_ms >= -1 && high_rtt_nack_threshold_ms >= -1);
887  assert(high_rtt_nack_threshold_ms == -1 ||
888         low_rtt_nack_threshold_ms <= high_rtt_nack_threshold_ms);
889  assert(low_rtt_nack_threshold_ms > -1 || high_rtt_nack_threshold_ms == -1);
890  low_rtt_nack_threshold_ms_ = low_rtt_nack_threshold_ms;
891  high_rtt_nack_threshold_ms_ = high_rtt_nack_threshold_ms;
892  // Don't set a high start RTT if high_rtt_nack_threshold_ms_ is used, so that
893  // NACK is not disabled in hybrid mode.
894  if (rtt_ms_ == kDefaultRtt && high_rtt_nack_threshold_ms_ != -1) {
895    rtt_ms_ = 0;
896  }
897  if (!WaitForRetransmissions()) {
898    jitter_estimate_.ResetNackCount();
899  }
900}
901
902void VCMJitterBuffer::SetNackSettings(size_t max_nack_list_size,
903                                      int max_packet_age_to_nack,
904                                      int max_incomplete_time_ms) {
905  CriticalSectionScoped cs(crit_sect_);
906  assert(max_packet_age_to_nack >= 0);
907  assert(max_incomplete_time_ms >= 0);
908  max_nack_list_size_ = max_nack_list_size;
909  max_packet_age_to_nack_ = max_packet_age_to_nack;
910  max_incomplete_time_ms_ = max_incomplete_time_ms;
911  nack_seq_nums_.resize(max_nack_list_size_);
912}
913
914VCMNackMode VCMJitterBuffer::nack_mode() const {
915  CriticalSectionScoped cs(crit_sect_);
916  return nack_mode_;
917}
918
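// Returns the timestamp span (in RTP units) covered by frames that are not yet
// decodable and continuous. The caller compares this against
// 90 * max_incomplete_time_ms_, i.e. a 90 kHz video RTP clock is assumed.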
919int VCMJitterBuffer::NonContinuousOrIncompleteDuration() {
920  if (incomplete_frames_.empty()) {
921    return 0;
922  }
923  uint32_t start_timestamp = incomplete_frames_.Front()->TimeStamp();
924  if (!decodable_frames_.empty()) {
925    start_timestamp = decodable_frames_.Back()->TimeStamp();
926  }
927  return incomplete_frames_.Back()->TimeStamp() - start_timestamp;
928}
929
930uint16_t VCMJitterBuffer::EstimatedLowSequenceNumber(
931    const VCMFrameBuffer& frame) const {
932  assert(frame.GetLowSeqNum() >= 0);
933  if (frame.HaveFirstPacket())
934    return frame.GetLowSeqNum();
935
936  // This estimate is not accurate if more than one packet with lower sequence
937  // number is lost.
938  return frame.GetLowSeqNum() - 1;
939}
940
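// Builds the list of sequence numbers to NACK. May set |request_key_frame|
// instead, e.g. when decoding has not started and no key frame can be found,
// when recycling a too-large NACK list did not uncover a key frame, or when
// the non-decodable duration exceeds max_incomplete_time_ms_ and no key frame
// is waiting in the buffer.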
941uint16_t* VCMJitterBuffer::GetNackList(uint16_t* nack_list_size,
942                                       bool* request_key_frame) {
943  CriticalSectionScoped cs(crit_sect_);
944  *request_key_frame = false;
945  if (nack_mode_ == kNoNack) {
946    *nack_list_size = 0;
947    return NULL;
948  }
949  if (last_decoded_state_.in_initial_state()) {
950    VCMFrameBuffer* next_frame = NextFrame();
951    const bool first_frame_is_key = next_frame &&
952        next_frame->FrameType() == kVideoFrameKey &&
953        next_frame->HaveFirstPacket();
954    if (!first_frame_is_key) {
955      bool have_non_empty_frame = decodable_frames_.end() != find_if(
956          decodable_frames_.begin(), decodable_frames_.end(),
957          HasNonEmptyState);
958      if (!have_non_empty_frame) {
959        have_non_empty_frame = incomplete_frames_.end() != find_if(
960            incomplete_frames_.begin(), incomplete_frames_.end(),
961            HasNonEmptyState);
962      }
963      if (have_non_empty_frame)
964        LOG_F(LS_INFO) << "First frame is not key; Recycling.";
965      bool found_key_frame = RecycleFramesUntilKeyFrame();
966      if (!found_key_frame) {
967        *request_key_frame = have_non_empty_frame;
968        *nack_list_size = 0;
969        return NULL;
970      }
971    }
972  }
973  if (TooLargeNackList()) {
974    *request_key_frame = !HandleTooLargeNackList();
975  }
976  if (max_incomplete_time_ms_ > 0) {
977    int non_continuous_incomplete_duration =
978        NonContinuousOrIncompleteDuration();
979    if (non_continuous_incomplete_duration > 90 * max_incomplete_time_ms_) {
980      LOG_F(LS_INFO) << "Too long non-decodable duration: " <<
981          non_continuous_incomplete_duration << " > " <<
982          90 * max_incomplete_time_ms_;
983      FrameList::reverse_iterator rit = find_if(incomplete_frames_.rbegin(),
984          incomplete_frames_.rend(), IsKeyFrame);
985      if (rit == incomplete_frames_.rend()) {
986        // Request a key frame if we don't have one already.
987        *request_key_frame = true;
988        *nack_list_size = 0;
989        return NULL;
990      } else {
991        // Skip to the last key frame. If it's incomplete we will start
992        // NACKing it.
993        // Note that the estimated low sequence number is correct for VP8
994        // streams because only the first packet of a key frame is marked.
995        last_decoded_state_.Reset();
996        DropPacketsFromNackList(EstimatedLowSequenceNumber(*rit->second));
997      }
998    }
999  }
1000  unsigned int i = 0;
1001  SequenceNumberSet::iterator it = missing_sequence_numbers_.begin();
1002  for (; it != missing_sequence_numbers_.end(); ++it, ++i) {
1003    nack_seq_nums_[i] = *it;
1004  }
1005  *nack_list_size = i;
1006  return &nack_seq_nums_[0];
1007}
1008
1009void VCMJitterBuffer::SetDecodeErrorMode(VCMDecodeErrorMode error_mode) {
1010  CriticalSectionScoped cs(crit_sect_);
1011  decode_error_mode_ = error_mode;
1012}
1013
1014VCMFrameBuffer* VCMJitterBuffer::NextFrame() const {
1015  if (!decodable_frames_.empty())
1016    return decodable_frames_.Front();
1017  if (!incomplete_frames_.empty())
1018    return incomplete_frames_.Front();
1019  return NULL;
1020}
1021
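// Adds the sequence numbers between the latest received one and
// |sequence_number| to the NACK list; if |sequence_number| is not newer, it is
// instead erased from the list (the missing packet has arrived). Returns false
// if the list had to be flushed (too large or containing too-old packets)
// without a key frame being found, in which case the caller requests one.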
1022bool VCMJitterBuffer::UpdateNackList(uint16_t sequence_number) {
1023  if (nack_mode_ == kNoNack) {
1024    return true;
1025  }
1026  // Make sure we don't add packets which are already too old to be decoded.
1027  if (!last_decoded_state_.in_initial_state()) {
1028    latest_received_sequence_number_ = LatestSequenceNumber(
1029        latest_received_sequence_number_,
1030        last_decoded_state_.sequence_num());
1031  }
1032  if (IsNewerSequenceNumber(sequence_number,
1033                            latest_received_sequence_number_)) {
1034    // Push any missing sequence numbers to the NACK list.
1035    for (uint16_t i = latest_received_sequence_number_ + 1;
1036         IsNewerSequenceNumber(sequence_number, i); ++i) {
1037      missing_sequence_numbers_.insert(missing_sequence_numbers_.end(), i);
1038      TRACE_EVENT_INSTANT1("webrtc", "AddNack", "seqnum", i);
1039    }
1040    if (TooLargeNackList() && !HandleTooLargeNackList()) {
1041      return false;
1042    }
1043    if (MissingTooOldPacket(sequence_number) &&
1044        !HandleTooOldPackets(sequence_number)) {
1045      return false;
1046    }
1047  } else {
1048    missing_sequence_numbers_.erase(sequence_number);
1049    TRACE_EVENT_INSTANT1("webrtc", "RemoveNack", "seqnum", sequence_number);
1050  }
1051  return true;
1052}
1053
1054bool VCMJitterBuffer::TooLargeNackList() const {
1055  return missing_sequence_numbers_.size() > max_nack_list_size_;
1056}
1057
1058bool VCMJitterBuffer::HandleTooLargeNackList() {
1059  // Recycle frames until the NACK list is small enough. It is likely cheaper to
1060  // request a key frame than to retransmit this many missing packets.
1061  LOG_F(LS_INFO) << "NACK list has grown too large: " <<
1062      missing_sequence_numbers_.size() << " > " << max_nack_list_size_;
1063  bool key_frame_found = false;
1064  while (TooLargeNackList()) {
1065    key_frame_found = RecycleFramesUntilKeyFrame();
1066  }
1067  return key_frame_found;
1068}
1069
1070bool VCMJitterBuffer::MissingTooOldPacket(
1071    uint16_t latest_sequence_number) const {
1072  if (missing_sequence_numbers_.empty()) {
1073    return false;
1074  }
1075  const uint16_t age_of_oldest_missing_packet = latest_sequence_number -
1076      *missing_sequence_numbers_.begin();
1077  // Recycle frames if the NACK list contains too old sequence numbers as
1078  // the packets may have already been dropped by the sender.
1079  return age_of_oldest_missing_packet > max_packet_age_to_nack_;
1080}
1081
1082bool VCMJitterBuffer::HandleTooOldPackets(uint16_t latest_sequence_number) {
1083  bool key_frame_found = false;
1084  const uint16_t age_of_oldest_missing_packet = latest_sequence_number -
1085      *missing_sequence_numbers_.begin();
1086  LOG_F(LS_INFO) << "NACK list contains too old sequence numbers: " <<
1087      age_of_oldest_missing_packet << " > " << max_packet_age_to_nack_;
1088  while (MissingTooOldPacket(latest_sequence_number)) {
1089    key_frame_found = RecycleFramesUntilKeyFrame();
1090  }
1091  return key_frame_found;
1092}
1093
1094void VCMJitterBuffer::DropPacketsFromNackList(
1095    uint16_t last_decoded_sequence_number) {
1096  // Erase all sequence numbers from the NACK list which we won't need any
1097  // longer.
1098  missing_sequence_numbers_.erase(missing_sequence_numbers_.begin(),
1099                                  missing_sequence_numbers_.upper_bound(
1100                                      last_decoded_sequence_number));
1101}
1102
1103int64_t VCMJitterBuffer::LastDecodedTimestamp() const {
1104  CriticalSectionScoped cs(crit_sect_);
1105  return last_decoded_state_.time_stamp();
1106}
1107
1108void VCMJitterBuffer::RenderBufferSize(uint32_t* timestamp_start,
1109                                       uint32_t* timestamp_end) {
1110  CriticalSectionScoped cs(crit_sect_);
1111  CleanUpOldOrEmptyFrames();
1112  *timestamp_start = 0;
1113  *timestamp_end = 0;
1114  if (decodable_frames_.empty()) {
1115    return;
1116  }
1117  *timestamp_start = decodable_frames_.Front()->TimeStamp();
1118  *timestamp_end = decodable_frames_.Back()->TimeStamp();
1119}
1120
1121VCMFrameBuffer* VCMJitterBuffer::GetEmptyFrame() {
1122  if (free_frames_.empty()) {
1123    if (!TryToIncreaseJitterBufferSize()) {
1124      return NULL;
1125    }
1126  }
1127  VCMFrameBuffer* frame = free_frames_.front();
1128  free_frames_.pop_front();
1129  return frame;
1130}
1131
1132bool VCMJitterBuffer::TryToIncreaseJitterBufferSize() {
1133  if (max_number_of_frames_ >= kMaxNumberOfFrames)
1134    return false;
1135  VCMFrameBuffer* new_frame = new VCMFrameBuffer();
1136  frame_buffers_[max_number_of_frames_] = new_frame;
1137  free_frames_.push_back(new_frame);
1138  ++max_number_of_frames_;
1139  WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding,
1140               VCMId(vcm_id_, receiver_id_),
1141               "JB(0x%x) FB(0x%x): Jitter buffer increased to: %d frames",
1142               this, new_frame, max_number_of_frames_);
1143  TRACE_COUNTER1("webrtc", "JBMaxFrames", max_number_of_frames_);
1144  return true;
1145}
1146
1147// Recycle oldest frames up to a key frame, used if jitter buffer is completely
1148// full.
1149bool VCMJitterBuffer::RecycleFramesUntilKeyFrame() {
1150  // First release incomplete frames, and only release decodable frames if there
1151  // are no incomplete ones.
1152  FrameList::iterator key_frame_it;
1153  bool key_frame_found = false;
1154  int dropped_frames = 0;
1155  dropped_frames += incomplete_frames_.RecycleFramesUntilKeyFrame(
1156      &key_frame_it, &free_frames_);
1157  key_frame_found = key_frame_it != incomplete_frames_.end();
1158  if (dropped_frames == 0) {
1159    dropped_frames += decodable_frames_.RecycleFramesUntilKeyFrame(
1160        &key_frame_it, &free_frames_);
1161    key_frame_found = key_frame_it != decodable_frames_.end();
1162  }
1163  drop_count_ += dropped_frames;
1164  if (dropped_frames) {
1165    WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCoding,
1166                 VCMId(vcm_id_, receiver_id_),
1167                 "Jitter buffer drop count:%u", drop_count_);
1168  }
1169  TRACE_EVENT_INSTANT0("webrtc", "JB::RecycleFramesUntilKeyFrame");
1170  if (key_frame_found) {
1171    // Reset last decoded state to make sure the next frame decoded is a key
1172    // frame, and start NACKing from here.
1173    last_decoded_state_.Reset();
1174    DropPacketsFromNackList(EstimatedLowSequenceNumber(*key_frame_it->second));
1175  } else if (decodable_frames_.empty()) {
1176    // All frames dropped. Reset the decoding state and clear missing sequence
1177    // numbers as we're starting fresh.
1178    last_decoded_state_.Reset();
1179    missing_sequence_numbers_.clear();
1180  }
1181  return key_frame_found;
1182}
1183
1184// Must be called under the critical section |crit_sect_|.
1185void VCMJitterBuffer::CountFrame(const VCMFrameBuffer& frame) {
1186  if (!frame.GetCountedFrame()) {
1187    // Ignore ACK frames.
1188    incoming_frame_count_++;
1189  }
1190
1191  if (frame.FrameType() == kVideoFrameKey) {
1192    TRACE_EVENT_ASYNC_STEP0("webrtc", "Video",
1193                            frame.TimeStamp(), "KeyComplete");
1194  } else {
1195    TRACE_EVENT_ASYNC_STEP0("webrtc", "Video",
1196                            frame.TimeStamp(), "DeltaComplete");
1197  }
1198
1199  // Update receive statistics. We count all layers, so when layers are used
1200  // the sum of key and delta frames may differ from the frame count.
1201  if (frame.IsSessionComplete()) {
1202    ++receive_statistics_[frame.FrameType()];
1203  }
1204}
1205
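// Exponential moving average of the packets-per-frame estimate:
// kFastConvergeMultiplier is used while frame_counter_ is at or below
// kFastConvergeThreshold, kNormalConvergeMultiplier after that.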
1206void VCMJitterBuffer::UpdateAveragePacketsPerFrame(int current_number_packets) {
1207  if (frame_counter_ > kFastConvergeThreshold) {
1208    average_packets_per_frame_ = average_packets_per_frame_
1209              * (1 - kNormalConvergeMultiplier)
1210            + current_number_packets * kNormalConvergeMultiplier;
1211  } else if (frame_counter_ > 0) {
1212    average_packets_per_frame_ = average_packets_per_frame_
1213              * (1 - kFastConvergeMultiplier)
1214            + current_number_packets * kFastConvergeMultiplier;
1215    frame_counter_++;
1216  } else {
1217    average_packets_per_frame_ = current_number_packets;
1218    frame_counter_++;
1219  }
1220}
1221
1222// Must be called under the critical section |crit_sect_|.
1223void VCMJitterBuffer::CleanUpOldOrEmptyFrames() {
1224  drop_count_ +=
1225      decodable_frames_.CleanUpOldOrEmptyFrames(&last_decoded_state_,
1226          &free_frames_);
1227  drop_count_ +=
1228      incomplete_frames_.CleanUpOldOrEmptyFrames(&last_decoded_state_,
1229          &free_frames_);
1230  if (!last_decoded_state_.in_initial_state()) {
1231    DropPacketsFromNackList(last_decoded_state_.sequence_num());
1232  }
1233}
1234
1235// Must be called from within |crit_sect_|.
1236bool VCMJitterBuffer::IsPacketRetransmitted(const VCMPacket& packet) const {
1237  return missing_sequence_numbers_.find(packet.seqNum) !=
1238      missing_sequence_numbers_.end();
1239}
1240
1241// Must be called under the critical section |crit_sect_|. Should never be
1242// called with retransmitted frames, they must be filtered out before this
1243// function is called.
1244void VCMJitterBuffer::UpdateJitterEstimate(const VCMJitterSample& sample,
1245                                           bool incomplete_frame) {
1246  if (sample.latest_packet_time == -1) {
1247    return;
1248  }
1249  if (incomplete_frame) {
1250    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding,
1251                 VCMId(vcm_id_, receiver_id_), "Received incomplete frame "
1252                 "timestamp %u frame size %u at time %u",
1253                 sample.timestamp, sample.frame_size,
1254                 MaskWord64ToUWord32(sample.latest_packet_time));
1255  } else {
1256    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding,
1257                 VCMId(vcm_id_, receiver_id_), "Received complete frame "
1258                 "timestamp %u frame size %u at time %u",
1259                 sample.timestamp, sample.frame_size,
1260                 MaskWord64ToUWord32(sample.latest_packet_time));
1261  }
1262  UpdateJitterEstimate(sample.latest_packet_time, sample.timestamp,
1263                       sample.frame_size, incomplete_frame);
1264}
1265
1266// Must be called under the critical section |crit_sect_|. Should never be
1267// called with retransmitted frames, they must be filtered out before this
1268// function is called.
1269void VCMJitterBuffer::UpdateJitterEstimate(const VCMFrameBuffer& frame,
1270                                           bool incomplete_frame) {
1271  if (frame.LatestPacketTimeMs() == -1) {
1272    return;
1273  }
1274  // No retransmitted frames should be a part of the jitter
1275  // estimate.
1276  if (incomplete_frame) {
1277    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding,
1278                 VCMId(vcm_id_, receiver_id_),
1279                 "Received incomplete frame timestamp %u frame type %d "
1280                 "frame size %u at time %u, jitter estimate was %u",
1281                 frame.TimeStamp(), frame.FrameType(), frame.Length(),
1282                 MaskWord64ToUWord32(frame.LatestPacketTimeMs()),
1283                 EstimatedJitterMs());
1284  } else {
1285    WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding,
1286                 VCMId(vcm_id_, receiver_id_), "Received complete frame "
1287                 "timestamp %u frame type %d frame size %u at time %u, "
1288                 "jitter estimate was %u",
1289                 frame.TimeStamp(), frame.FrameType(), frame.Length(),
1290                 MaskWord64ToUWord32(frame.LatestPacketTimeMs()),
1291                 EstimatedJitterMs());
1292  }
1293  UpdateJitterEstimate(frame.LatestPacketTimeMs(), frame.TimeStamp(),
1294                       frame.Length(), incomplete_frame);
1295}
1296
1297// Must be called under the critical section |crit_sect_|. Should never be
1298// called with retransmitted frames, they must be filtered out before this
1299// function is called.
1300void VCMJitterBuffer::UpdateJitterEstimate(
1301    int64_t latest_packet_time_ms,
1302    uint32_t timestamp,
1303    unsigned int frame_size,
1304    bool incomplete_frame) {
1305  if (latest_packet_time_ms == -1) {
1306    return;
1307  }
1308  int64_t frame_delay;
1309  // Calculate the delay estimate
1310  WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCoding,
1311               VCMId(vcm_id_, receiver_id_),
1312               "Packet received and sent to jitter estimate with: "
1313               "timestamp=%u wall_clock=%u", timestamp,
1314               MaskWord64ToUWord32(latest_packet_time_ms));
1315  bool not_reordered = inter_frame_delay_.CalculateDelay(timestamp,
1316                                                      &frame_delay,
1317                                                      latest_packet_time_ms);
1318  // Filter out frames which have been reordered in time by the network
1319  if (not_reordered) {
1320    // Update the jitter estimate with the new samples
1321    jitter_estimate_.UpdateEstimate(frame_delay, frame_size, incomplete_frame);
1322  }
1323}
1324
1325bool VCMJitterBuffer::WaitForRetransmissions() {
1326  if (nack_mode_ == kNoNack) {
1327    // NACK disabled -> don't wait for retransmissions.
1328    return false;
1329  }
1330  // Evaluate if the RTT is higher than |high_rtt_nack_threshold_ms_|, and in
1331  // that case we don't wait for retransmissions.
1332  if (high_rtt_nack_threshold_ms_ >= 0 &&
1333      rtt_ms_ >= static_cast<unsigned int>(high_rtt_nack_threshold_ms_)) {
1334    return false;
1335  }
1336  return true;
1337}
1338}  // namespace webrtc