Commit 5d967288 authored by shenghao, committed by Commit bot

RTC Video Encoder: Use capturer timestamp

Add the capturer timestamp as a parameter to VEA::Client::BitstreamBufferReady() so
that RTCVideoEncoder can use it.

BUG=350106
TEST=Verify that apprtc loopback works on minnie.
R=bbudge@chromium.org, posciak@chromium.org, sandersd@chromium.org, wfh@chromium.org, wuchengli@chromium.org

Review-Url: https://codereview.chromium.org/1996453003
Cr-Commit-Position: refs/heads/master@{#396786}
parent 2d827a87
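
The shape of the change, condensed: the VideoEncodeAccelerator::Client callback gains a base::TimeDelta carrying the capture timestamp of the frame that produced the output buffer, so clients no longer have to sample a clock at delivery time. A plain-C++ sketch of that contract (int64_t microseconds stands in for base::TimeDelta; the names mirror media::VideoEncodeAccelerator::Client but this is not the real Chromium header):

  #include <cstddef>
  #include <cstdint>
  #include <cstdio>

  // Illustrative stand-in for media::VideoEncodeAccelerator::Client.
  class Client {
   public:
    virtual ~Client() = default;
    virtual void BitstreamBufferReady(int32_t bitstream_buffer_id,
                                      size_t payload_size,
                                      bool key_frame,
                                      int64_t timestamp_us) = 0;  // New parameter.
  };

  class PrintingClient : public Client {
   public:
    void BitstreamBufferReady(int32_t bitstream_buffer_id, size_t payload_size,
                              bool key_frame, int64_t timestamp_us) override {
      std::printf("buffer %d ready: %zu bytes, key=%d, capture ts=%lld us\n",
                  static_cast<int>(bitstream_buffer_id), payload_size,
                  static_cast<int>(key_frame),
                  static_cast<long long>(timestamp_us));
    }
  };

  int main() {
    PrintingClient client;
    // The encoder side now echoes back the capture timestamp of the source
    // frame instead of the client sampling a clock on delivery.
    client.BitstreamBufferReady(/*bitstream_buffer_id=*/0, /*payload_size=*/4096,
                                /*key_frame=*/true, /*timestamp_us=*/33333);
    return 0;
  }
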
......@@ -149,7 +149,8 @@ class RTCVideoEncoder::Impl
size_t output_buffer_size) override;
void BitstreamBufferReady(int32_t bitstream_buffer_id,
size_t payload_size,
bool key_frame) override;
bool key_frame,
base::TimeDelta timestamp) override;
void NotifyError(media::VideoEncodeAccelerator::Error error) override;
private:
......@@ -449,8 +450,9 @@ void RTCVideoEncoder::Impl::RequireBitstreamBuffers(
}
void RTCVideoEncoder::Impl::BitstreamBufferReady(int32_t bitstream_buffer_id,
size_t payload_size,
bool key_frame) {
size_t payload_size,
bool key_frame,
base::TimeDelta timestamp) {
DVLOG(3) << "Impl::BitstreamBufferReady(): "
"bitstream_buffer_id=" << bitstream_buffer_id
<< ", payload_size=" << payload_size
......@@ -471,14 +473,11 @@ void RTCVideoEncoder::Impl::BitstreamBufferReady(int32_t bitstream_buffer_id,
}
output_buffers_free_count_--;
// Use webrtc timestamps to ensure correct RTP sender behavior.
// TODO(hshi): obtain timestamp from the capturer, see crbug.com/350106.
const int64_t capture_time_us = rtc::TimeMicros();
// Derive the capture time (in ms) and RTP timestamp (in 90KHz ticks).
const int64_t capture_time_ms = capture_time_us / 1000;
// This is based on how input timestamps are calculated in
// webrtc/video/video_capture_input.cc.
const uint32_t rtp_timestamp =
static_cast<uint32_t>(capture_time_us * 90 / 1000);
static_cast<uint32_t>(timestamp.InMilliseconds()) * 90;
webrtc::EncodedImage image(
reinterpret_cast<uint8_t*>(output_buffer->memory()), payload_size,
......@@ -486,7 +485,7 @@ void RTCVideoEncoder::Impl::BitstreamBufferReady(int32_t bitstream_buffer_id,
image._encodedWidth = input_visible_size_.width();
image._encodedHeight = input_visible_size_.height();
image._timeStamp = rtp_timestamp;
image.capture_time_ms_ = capture_time_ms;
image.capture_time_ms_ = timestamp.InMilliseconds();
image._frameType =
(key_frame ? webrtc::kVideoFrameKey : webrtc::kVideoFrameDelta);
image._completeFrame = true;
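
With the capturer timestamp available, the RTP timestamp becomes the capture time in milliseconds scaled to RTP's 90 kHz video clock (90 ticks per millisecond), and capture_time_ms_ is the same value in milliseconds; the old rtc::TimeMicros() sampling goes away. A self-contained sketch of the same arithmetic with plain integers (the 33 ms value is hypothetical):

  #include <cstdint>
  #include <cstdio>

  int main() {
    // Hypothetical capture timestamp: 33 ms, roughly one frame at 30 fps.
    const int64_t capture_time_ms = 33;
    // RTP's video clock runs at 90 kHz, i.e. 90 ticks per millisecond, so the
    // millisecond capture time is scaled by 90, mirroring the patch above.
    const uint32_t rtp_timestamp = static_cast<uint32_t>(capture_time_ms) * 90;
    std::printf("capture_time_ms=%lld -> rtp_timestamp=%u\n",
                static_cast<long long>(capture_time_ms), rtp_timestamp);  // 2970
    return 0;
  }
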
......@@ -568,7 +567,7 @@ void RTCVideoEncoder::Impl::EncodeOneFrame() {
gfx::Rect(input_visible_size_), input_visible_size_,
reinterpret_cast<uint8_t*>(input_buffer->memory()),
input_buffer->mapped_size(), input_buffer->handle(), 0,
base::TimeDelta());
base::TimeDelta::FromMilliseconds(next_frame->ntp_time_ms()));
if (!frame.get()) {
LogAndNotifyError(FROM_HERE, "failed to create frame",
media::VideoEncodeAccelerator::kPlatformFailureError);
......
......@@ -253,7 +253,8 @@ class VEAEncoder final : public VideoTrackRecorder::Encoder,
size_t output_buffer_size) override;
void BitstreamBufferReady(int32_t bitstream_buffer_id,
size_t payload_size,
bool key_frame) override;
bool key_frame,
base::TimeDelta timestamp) override;
void NotifyError(media::VideoEncodeAccelerator::Error error) override;
private:
......@@ -425,7 +426,8 @@ void VEAEncoder::RequireBitstreamBuffers(unsigned int /*input_count*/,
void VEAEncoder::BitstreamBufferReady(int32_t bitstream_buffer_id,
size_t payload_size,
bool keyframe) {
bool keyframe,
base::TimeDelta timestamp) {
DVLOG(3) << __FUNCTION__;
DCHECK(encoding_task_runner_->BelongsToCurrentThread());
......
......@@ -441,12 +441,14 @@ void PepperVideoEncoderHost::RequireBitstreamBuffers(
}
void PepperVideoEncoderHost::BitstreamBufferReady(int32_t buffer_id,
size_t payload_size,
bool key_frame) {
size_t payload_size,
bool key_frame,
base::TimeDelta /* timestamp */) {
DCHECK(RenderThreadImpl::current());
DCHECK(shm_buffers_[buffer_id]->in_use);
shm_buffers_[buffer_id]->in_use = false;
// TODO: Pass timestamp. Tracked in crbug/613984.
host()->SendUnsolicitedReply(
pp_resource(),
PpapiPluginMsg_VideoEncoder_BitstreamBufferReady(
......
......@@ -71,7 +71,8 @@ class CONTENT_EXPORT PepperVideoEncoderHost
size_t output_buffer_size) override;
void BitstreamBufferReady(int32_t bitstream_buffer_id,
size_t payload_size,
bool key_frame) override;
bool key_frame,
base::TimeDelta timestamp) override;
void NotifyError(media::VideoEncodeAccelerator::Error error) override;
// ResourceHost implementation.
......
......@@ -487,7 +487,8 @@ void VideoEncoderShim::OnBitstreamBufferReady(
bool key_frame) {
DCHECK(RenderThreadImpl::current());
host_->BitstreamBufferReady(bitstream_buffer_id, payload_size, key_frame);
host_->BitstreamBufferReady(bitstream_buffer_id, payload_size, key_frame,
frame->timestamp());
}
void VideoEncoderShim::OnNotifyError(
......
......@@ -248,7 +248,8 @@ class ExternalVideoEncoder::VEAClientImpl
// to the Cast MAIN thread via the supplied callback.
void BitstreamBufferReady(int32_t bitstream_buffer_id,
size_t payload_size,
bool key_frame) final {
bool key_frame,
base::TimeDelta /* timestamp */) final {
DCHECK(task_runner_->RunsTasksOnCurrentThread());
if (bitstream_buffer_id < 0 ||
bitstream_buffer_id >= static_cast<int32_t>(output_buffers_.size())) {
......
......@@ -420,7 +420,7 @@ void AndroidVideoEncodeAccelerator::DequeueOutput() {
FROM_HERE,
base::Bind(&VideoEncodeAccelerator::Client::BitstreamBufferReady,
client_ptr_factory_->GetWeakPtr(), bitstream_buffer.id(), size,
key_frame));
key_frame, base::Time::Now() - base::Time()));
}
} // namespace media
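
AndroidVideoEncodeAccelerator does not yet thread a per-frame capture timestamp down to this point, so the patch passes base::Time::Now() - base::Time(): subtracting a default-constructed base::Time from the current wall-clock time yields "now" as a base::TimeDelta, filling the new parameter until a real timestamp is plumbed through (the VideoToolbox and fake encoders further down use the same placeholder). A rough standalone equivalent of that idea with std::chrono (illustrative only; the epoch and types differ from Chromium's base::Time):

  #include <chrono>
  #include <cstdio>

  int main() {
    using std::chrono::duration_cast;
    using std::chrono::microseconds;
    using std::chrono::system_clock;
    // "Now minus the clock's zero point" expressed as a duration -- the same
    // idea as base::Time::Now() - base::Time() in the patch above.
    const long long now_us = static_cast<long long>(
        duration_cast<microseconds>(system_clock::now().time_since_epoch())
            .count());
    std::printf("wall clock as a delta: %lld us\n", now_us);
    return 0;
  }
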
......@@ -302,14 +302,16 @@ void GpuVideoEncodeAcceleratorHost::OnNotifyInputDone(int32_t frame_id) {
void GpuVideoEncodeAcceleratorHost::OnBitstreamBufferReady(
int32_t bitstream_buffer_id,
uint32_t payload_size,
bool key_frame) {
bool key_frame,
base::TimeDelta timestamp) {
DCHECK(CalledOnValidThread());
DVLOG(3) << "OnBitstreamBufferReady(): "
"bitstream_buffer_id="
<< bitstream_buffer_id << ", payload_size=" << payload_size
<< ", key_frame=" << key_frame;
if (client_)
client_->BitstreamBufferReady(bitstream_buffer_id, payload_size, key_frame);
client_->BitstreamBufferReady(bitstream_buffer_id, payload_size, key_frame,
timestamp);
}
void GpuVideoEncodeAcceleratorHost::OnNotifyError(Error error) {
......
......@@ -95,7 +95,8 @@ class GpuVideoEncodeAcceleratorHost
void OnNotifyInputDone(int32_t frame_id);
void OnBitstreamBufferReady(int32_t bitstream_buffer_id,
uint32_t payload_size,
bool key_frame);
bool key_frame,
base::TimeDelta timestamp);
void OnNotifyError(Error error);
scoped_refptr<gpu::GpuChannelHost> channel_;
......
......@@ -177,10 +177,11 @@ IPC_MESSAGE_ROUTED1(AcceleratedVideoEncoderHostMsg_NotifyInputDone,
int32_t /* frame_id */)
// Notify the renderer that an output buffer has been filled with encoded data.
IPC_MESSAGE_ROUTED3(AcceleratedVideoEncoderHostMsg_BitstreamBufferReady,
IPC_MESSAGE_ROUTED4(AcceleratedVideoEncoderHostMsg_BitstreamBufferReady,
int32_t /* bitstream_buffer_id */,
uint32_t /* payload_size */,
bool /* key_frame */)
bool /* key_frame */,
base::TimeDelta /* timestamp */)
// Report error condition.
IPC_MESSAGE_ROUTED1(AcceleratedVideoEncoderHostMsg_NotifyError,
......
......@@ -151,9 +151,10 @@ void GpuVideoEncodeAccelerator::RequireBitstreamBuffers(
void GpuVideoEncodeAccelerator::BitstreamBufferReady(
int32_t bitstream_buffer_id,
size_t payload_size,
bool key_frame) {
bool key_frame,
base::TimeDelta timestamp) {
Send(new AcceleratedVideoEncoderHostMsg_BitstreamBufferReady(
host_route_id_, bitstream_buffer_id, payload_size, key_frame));
host_route_id_, bitstream_buffer_id, payload_size, key_frame, timestamp));
}
void GpuVideoEncodeAccelerator::NotifyError(
......
......@@ -60,7 +60,8 @@ class GpuVideoEncodeAccelerator
size_t output_buffer_size) override;
void BitstreamBufferReady(int32_t bitstream_buffer_id,
size_t payload_size,
bool key_frame) override;
bool key_frame,
base::TimeDelta timestamp) override;
void NotifyError(media::VideoEncodeAccelerator::Error error) override;
// gpu::GpuCommandBufferStub::DestructionObserver implementation.
......
......@@ -665,8 +665,12 @@ void V4L2VideoEncodeAccelerator::Dequeue() {
<< ", key_frame=" << key_frame;
child_task_runner_->PostTask(
FROM_HERE,
base::Bind(&Client::BitstreamBufferReady, client_,
output_record.buffer_ref->id, output_size, key_frame));
base::Bind(
&Client::BitstreamBufferReady, client_,
output_record.buffer_ref->id, output_size, key_frame,
base::TimeDelta::FromMicroseconds(
dqbuf.timestamp.tv_usec +
dqbuf.timestamp.tv_sec * base::Time::kMicrosecondsPerSecond)));
output_record.at_device = false;
output_record.buffer_ref.reset();
free_output_buffers_.push_back(dqbuf.index);
......@@ -691,6 +695,10 @@ bool V4L2VideoEncodeAccelerator::EnqueueInputRecord() {
qbuf.index = index;
qbuf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
qbuf.m.planes = qbuf_planes;
qbuf.timestamp.tv_sec = static_cast<time_t>(frame->timestamp().InSeconds());
qbuf.timestamp.tv_usec =
frame->timestamp().InMicroseconds() -
frame->timestamp().InSeconds() * base::Time::kMicrosecondsPerSecond;
DCHECK_EQ(device_input_format_, frame->format());
for (size_t i = 0; i < input_planes_count_; ++i) {
......
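
On the V4L2 path the timestamp has to survive a round trip through the kernel's buffer queue, so EnqueueInputRecord() splits it into the tv_sec/tv_usec fields of the buffer's timeval and Dequeue() reassembles the microseconds into a base::TimeDelta. A standalone sketch of that round trip with plain integers (the constant matches base::Time::kMicrosecondsPerSecond; the 1.25 s value is hypothetical):

  #include <sys/time.h>
  #include <cassert>
  #include <cstdint>

  int main() {
    const int64_t kMicrosecondsPerSecond = 1000000;
    // Hypothetical capture timestamp: 1.25 s expressed in microseconds.
    const int64_t timestamp_us = 1250000;

    // Enqueue side (EnqueueInputRecord): split across tv_sec / tv_usec.
    struct timeval tv;
    tv.tv_sec = static_cast<time_t>(timestamp_us / kMicrosecondsPerSecond);
    tv.tv_usec = timestamp_us - tv.tv_sec * kMicrosecondsPerSecond;

    // Dequeue side (Dequeue): reassemble the microsecond value, which the real
    // code then wraps in base::TimeDelta::FromMicroseconds().
    const int64_t roundtripped_us =
        tv.tv_usec + tv.tv_sec * kMicrosecondsPerSecond;
    assert(roundtripped_us == timestamp_us);
    return 0;
  }
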
......@@ -555,8 +555,9 @@ void VaapiVideoEncodeAccelerator::TryToReturnBitstreamBuffer() {
<< " id: " << buffer->id << " size: " << data_size;
child_task_runner_->PostTask(
FROM_HERE, base::Bind(&Client::BitstreamBufferReady, client_, buffer->id,
data_size, encode_job->keyframe));
FROM_HERE,
base::Bind(&Client::BitstreamBufferReady, client_, buffer->id, data_size,
encode_job->keyframe, encode_job->timestamp));
}
void VaapiVideoEncodeAccelerator::Encode(
......@@ -571,7 +572,7 @@ void VaapiVideoEncodeAccelerator::Encode(
base::Unretained(this), frame, force_keyframe));
}
bool VaapiVideoEncodeAccelerator::PrepareNextJob() {
bool VaapiVideoEncodeAccelerator::PrepareNextJob(base::TimeDelta timestamp) {
if (available_va_surface_ids_.size() < kMinSurfacesToEncode)
return false;
......@@ -584,6 +585,8 @@ bool VaapiVideoEncodeAccelerator::PrepareNextJob() {
return false;
}
current_encode_job_->timestamp = timestamp;
current_encode_job_->input_surface = new VASurface(
available_va_surface_ids_.back(), coded_size_,
vaapi_wrapper_->va_surface_format(), va_surface_release_cb_);
......@@ -619,7 +622,7 @@ void VaapiVideoEncodeAccelerator::EncodeFrameTask() {
if (state_ != kEncoding || encoder_input_queue_.empty())
return;
if (!PrepareNextJob()) {
if (!PrepareNextJob(encoder_input_queue_.front()->frame->timestamp())) {
DVLOGF(4) << "Not ready for next frame yet";
return;
}
......
......@@ -73,6 +73,8 @@ class MEDIA_GPU_EXPORT VaapiVideoEncodeAccelerator
// True if this job will produce a keyframe. Used to report
// to BitstreamBufferReady().
bool keyframe;
// Source timestamp.
base::TimeDelta timestamp;
EncodeJob();
~EncodeJob();
......@@ -118,7 +120,7 @@ class MEDIA_GPU_EXPORT VaapiVideoEncodeAccelerator
// Check if we have sufficient resources for a new encode job, claim them and
// fill current_encode_job_ with them.
// Return false if we cannot start a new job yet, true otherwise.
bool PrepareNextJob();
bool PrepareNextJob(base::TimeDelta timestamp);
// Begin a new frame, making it a keyframe if |force_keyframe| is true,
// updating current_pic_.
......
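
VA-API encoding finishes asynchronously, so the timestamp is recorded on the pending EncodeJob in PrepareNextJob() and only read back in TryToReturnBitstreamBuffer() when the corresponding bitstream buffer is returned. A minimal standalone sketch of that carry-through pattern (the struct and values are illustrative; the real EncodeJob also owns the input VA surface and related buffers):

  #include <cstdint>
  #include <memory>
  #include <queue>

  // Illustrative stand-in for VaapiVideoEncodeAccelerator::EncodeJob.
  struct EncodeJob {
    bool keyframe = false;
    int64_t timestamp_us = 0;  // Source timestamp, reported with the output.
  };

  int main() {
    std::queue<std::unique_ptr<EncodeJob>> submitted_jobs;

    // PrepareNextJob(): record the input frame's timestamp on the job.
    auto job = std::make_unique<EncodeJob>();
    job->timestamp_us = 1250000;
    submitted_jobs.push(std::move(job));

    // TryToReturnBitstreamBuffer(): the finished job still carries the
    // timestamp, so it can be forwarded to Client::BitstreamBufferReady().
    const int64_t reported_us = submitted_jobs.front()->timestamp_us;
    submitted_jobs.pop();
    return reported_us == 1250000 ? 0 : 1;
  }
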
......@@ -774,7 +774,8 @@ class VEAClient : public VideoEncodeAccelerator::Client {
size_t output_buffer_size) override;
void BitstreamBufferReady(int32_t bitstream_buffer_id,
size_t payload_size,
bool key_frame) override;
bool key_frame,
base::TimeDelta timestamp) override;
void NotifyError(VideoEncodeAccelerator::Error error) override;
private:
......@@ -955,6 +956,9 @@ class VEAClient : public VideoEncodeAccelerator::Client {
// The timer used to feed the encoder with the input frames.
std::unique_ptr<base::RepeatingTimer> input_timer_;
// The timestamps for each frame in the order of CreateFrame() invocation.
std::queue<base::TimeDelta> frame_timestamps_;
};
VEAClient::VEAClient(TestStream* test_stream,
......@@ -1216,7 +1220,8 @@ void VEAClient::RequireBitstreamBuffers(unsigned int input_count,
void VEAClient::BitstreamBufferReady(int32_t bitstream_buffer_id,
size_t payload_size,
bool key_frame) {
bool key_frame,
base::TimeDelta timestamp) {
DCHECK(thread_checker_.CalledOnValidThread());
ASSERT_LE(payload_size, output_buffer_size_);
......@@ -1228,6 +1233,10 @@ void VEAClient::BitstreamBufferReady(int32_t bitstream_buffer_id,
if (state_ == CS_FINISHED || state_ == CS_VALIDATED)
return;
ASSERT_FALSE(frame_timestamps_.empty());
ASSERT_EQ(timestamp, frame_timestamps_.front());
frame_timestamps_.pop();
encoded_stream_size_since_last_check_ += payload_size;
const uint8_t* stream_ptr = static_cast<const uint8_t*>(shm->memory());
......@@ -1367,6 +1376,7 @@ void VEAClient::FeedEncoderWithOneInput() {
int32_t input_id;
scoped_refptr<media::VideoFrame> video_frame =
PrepareInputFrame(pos_in_input_stream_, &input_id);
frame_timestamps_.push(video_frame->timestamp());
pos_in_input_stream_ += test_stream_->aligned_buffer_size;
bool force_keyframe = false;
......
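
The unittest exercises the new plumbing end to end: FeedEncoderWithOneInput() pushes each input frame's timestamp onto a queue, and BitstreamBufferReady() expects the reported timestamp to equal the oldest pending one, i.e. outputs come back in submission order (assuming one output buffer per input frame). The same FIFO check as a standalone sketch with hypothetical timestamps:

  #include <cassert>
  #include <cstdint>
  #include <queue>

  int main() {
    std::queue<int64_t> frame_timestamps_us;

    // FeedEncoderWithOneInput(): remember each submitted frame's timestamp.
    for (int64_t ts : {0, 33333, 66666})
      frame_timestamps_us.push(ts);

    // BitstreamBufferReady(): every output must report the oldest pending
    // timestamp, which is then retired.
    for (int64_t reported : {0, 33333, 66666}) {
      assert(!frame_timestamps_us.empty());
      assert(reported == frame_timestamps_us.front());
      frame_timestamps_us.pop();
    }
    return 0;
  }
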
......@@ -444,8 +444,9 @@ void VTVideoEncodeAccelerator::ReturnBitstreamBuffer(
if (encode_output->info & VideoToolboxGlue::kVTEncodeInfo_FrameDropped) {
DVLOG(2) << " frame dropped";
client_task_runner_->PostTask(
FROM_HERE, base::Bind(&Client::BitstreamBufferReady, client_,
buffer_ref->id, 0, false));
FROM_HERE,
base::Bind(&Client::BitstreamBufferReady, client_, buffer_ref->id, 0,
false, base::Time::Now() - base::Time()));
return;
}
......@@ -467,8 +468,9 @@ void VTVideoEncodeAccelerator::ReturnBitstreamBuffer(
bitrate_adjuster_.Update(used_buffer_size);
client_task_runner_->PostTask(
FROM_HERE, base::Bind(&Client::BitstreamBufferReady, client_,
buffer_ref->id, used_buffer_size, keyframe));
FROM_HERE,
base::Bind(&Client::BitstreamBufferReady, client_, buffer_ref->id,
used_buffer_size, keyframe, base::Time::Now() - base::Time()));
}
bool VTVideoEncodeAccelerator::ResetCompressionSession() {
......
......@@ -131,9 +131,8 @@ void FakeVideoEncodeAccelerator::DoBitstreamBufferReady(
int32_t bitstream_buffer_id,
size_t payload_size,
bool key_frame) const {
client_->BitstreamBufferReady(bitstream_buffer_id,
payload_size,
key_frame);
client_->BitstreamBufferReady(bitstream_buffer_id, payload_size, key_frame,
base::Time::Now() - base::Time());
}
} // namespace media
......@@ -79,9 +79,11 @@ class MEDIA_EXPORT VideoEncodeAccelerator {
// |bitstream_buffer_id| is the id of the buffer that is ready.
// |payload_size| is the byte size of the used portion of the buffer.
// |key_frame| is true if this delivered frame is a keyframe.
// |timestamp| is the same timestamp as in VideoFrame passed to Encode().
virtual void BitstreamBufferReady(int32_t bitstream_buffer_id,
size_t payload_size,
bool key_frame) = 0;
bool key_frame,
base::TimeDelta timestamp) = 0;
// Error notification callback. Note that errors in Initialize() will not be
// reported here, but will instead be indicated by a false return value
......