Commit 6d9bb4a0 authored by yuweih's avatar yuweih Committed by Commit bot

[Chromoting] Implement DualBufferFrameConsumer

This class uses two buffers to decode multiple frames. It will be used
by the chromoting client OpenGL renderer.

BUG=385924

Review-Url: https://codereview.chromium.org/2156713002
Cr-Commit-Position: refs/heads/master@{#407233}
parent 0e6a36ee
......@@ -85,6 +85,7 @@ source_set("unit_tests") {
"chromoting_client_runtime_unittest.cc",
"client_status_logger_unittest.cc",
"client_telemetry_logger_unittest.cc",
"dual_buffer_frame_consumer_unittest.cc",
"empty_cursor_filter_unittest.cc",
"key_event_mapper_unittest.cc",
"normalizing_input_filter_cros_unittest.cc",
......
// Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "remoting/client/dual_buffer_frame_consumer.h"
#include "base/bind.h"
#include "base/bind_helpers.h"
#include "base/location.h"
#include "base/logging.h"
#include "base/memory/ptr_util.h"
#include "base/threading/thread_task_runner_handle.h"
#include "third_party/webrtc/modules/desktop_capture/desktop_frame.h"
#include "third_party/webrtc/modules/desktop_capture/shared_desktop_frame.h"
namespace remoting {
// Constructs the consumer. |callback| receives each decoded frame;
// |task_runner| is the thread the callback is posted to (may be null, in
// which case the callback runs synchronously inside DrawFrame); |format| is
// the pixel format reported to the decoder via GetPixelFormat().
DualBufferFrameConsumer::DualBufferFrameConsumer(
const RenderCallback& callback,
scoped_refptr<base::SingleThreadTaskRunner> task_runner,
protocol::FrameConsumer::PixelFormat format)
: callback_(callback),
task_runner_(task_runner),
pixel_format_(format),
weak_factory_(this) {
// Cache the weak pointer now so GetWeakPtr() can be called from any thread
// without touching the non-thread-safe factory.
weak_ptr_ = weak_factory_.GetWeakPtr();
// The object may be constructed on a different thread than the one it is
// used on; the checker re-binds on the first checked call.
thread_checker_.DetachFromThread();
}
// Must be destroyed on the same thread the consumer is used on.
DualBufferFrameConsumer::~DualBufferFrameConsumer() {
DCHECK(thread_checker_.CalledOnValidThread());
}
// Builds a standalone full-desktop frame by merging the two buffers and
// feeds it to the render callback. Does nothing if no frame has been
// allocated yet (i.e. no updates received).
void DualBufferFrameConsumer::RequestFullDesktopFrame() {
DCHECK(thread_checker_.CalledOnValidThread());
if (!buffers_[0]) {
return;
}
// Both buffers are reallocated together, so their sizes must agree.
DCHECK(buffers_[0]->size().equals(buffers_[1]->size()));
// This creates a copy of buffers_[0] and merges area defined in
// |buffer_1_mask_| from buffers_[1] into the copy.
std::unique_ptr<webrtc::DesktopFrame> full_frame(
webrtc::BasicDesktopFrame::CopyOf(*buffers_[0]));
webrtc::DesktopRect desktop_rect =
webrtc::DesktopRect::MakeSize(buffers_[0]->size());
for (webrtc::DesktopRegion::Iterator i(buffer_1_mask_); !i.IsAtEnd();
i.Advance()) {
full_frame->CopyPixelsFrom(*buffers_[1], i.rect().top_left(),
i.rect());
}
// The merged frame is entirely valid, so mark the whole desktop updated.
full_frame->mutable_updated_region()->SetRect(desktop_rect);
RunRenderCallback(std::move(full_frame), base::Bind(&base::DoNothing));
}
// Hands out a frame that shares one of the two internal buffers. Buffers are
// rebuilt (and the dirty mask reset) whenever the requested size differs from
// the current buffer size; otherwise the two buffers are used alternately.
std::unique_ptr<webrtc::DesktopFrame> DualBufferFrameConsumer::AllocateFrame(
    const webrtc::DesktopSize& size) {
  DCHECK(thread_checker_.CalledOnValidThread());
  const bool needs_reallocation =
      !buffers_[0] || !buffers_[0]->size().equals(size);
  if (needs_reallocation) {
    // A resolution change invalidates everything: recreate both buffers,
    // forget buffer 1's dirty regions, and restart from buffer 0.
    for (auto& buffer : buffers_) {
      buffer = webrtc::SharedDesktopFrame::Wrap(
          base::WrapUnique(new webrtc::BasicDesktopFrame(size)));
    }
    buffer_1_mask_.Clear();
    current_buffer_ = 0;
  } else {
    // Flip between buffer 0 and buffer 1.
    current_buffer_ = 1 - current_buffer_;
  }
  return buffers_[current_buffer_]->Share();
}
// Records which buffer |frame| was decoded into (updating |buffer_1_mask_|
// accordingly) and forwards the frame to the render callback. |done| is run
// after the frame has been rendered.
void DualBufferFrameConsumer::DrawFrame(
    std::unique_ptr<webrtc::DesktopFrame> frame,
    const base::Closure& done) {
  DCHECK(thread_checker_.CalledOnValidThread());
  // Frames passed to DrawFrame() are the SharedDesktopFrame instances handed
  // out by AllocateFrame(). Use static_cast for the hierarchy downcast;
  // reinterpret_cast is not guaranteed to adjust the pointer correctly for
  // derived classes.
  webrtc::SharedDesktopFrame* shared_frame =
      static_cast<webrtc::SharedDesktopFrame*>(frame.get());
  if (shared_frame->GetUnderlyingFrame() == buffers_[1]->GetUnderlyingFrame()) {
    // Buffer 1 now holds newer pixels than buffer 0 for these areas.
    buffer_1_mask_.AddRegion(frame->updated_region());
  } else if (shared_frame->GetUnderlyingFrame() ==
             buffers_[0]->GetUnderlyingFrame()) {
    // Buffer 0 has caught up for these areas; they are no longer only in
    // buffer 1.
    buffer_1_mask_.Subtract(frame->updated_region());
  }
  RunRenderCallback(std::move(frame), done);
}
// Returns the pixel format supplied at construction. Safe to call on any
// thread since |pixel_format_| is immutable after construction.
protocol::FrameConsumer::PixelFormat
DualBufferFrameConsumer::GetPixelFormat() {
return pixel_format_;
}
// Returns the weak pointer cached at construction time, avoiding a call into
// the non-thread-safe WeakPtrFactory.
base::WeakPtr<DualBufferFrameConsumer> DualBufferFrameConsumer::GetWeakPtr() {
return weak_ptr_;
}
// Delivers |frame| to |callback_| on the proper thread. With a null
// |task_runner_| the callback runs synchronously on the current stack;
// otherwise a task is posted to |task_runner_|.
void DualBufferFrameConsumer::RunRenderCallback(
std::unique_ptr<webrtc::DesktopFrame> frame,
const base::Closure& done) {
if (!task_runner_) {
callback_.Run(std::move(frame), done);
return;
}
// Wrap |done| so that when the renderer runs it on |task_runner_|'s thread,
// it is bounced back (via PostTask) to the thread this method was called on.
task_runner_->PostTask(
FROM_HERE, base::Bind(callback_, base::Passed(&frame), base::Bind(
base::IgnoreResult(&base::TaskRunner::PostTask),
base::ThreadTaskRunnerHandle::Get(), FROM_HERE, done)));
}
} // namespace remoting
// Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef REMOTING_CLIENT_DUAL_BUFFER_FRAME_CONSUMER_H_
#define REMOTING_CLIENT_DUAL_BUFFER_FRAME_CONSUMER_H_
#include "base/callback.h"
#include "base/macros.h"
#include "base/memory/ref_counted.h"
#include "base/memory/weak_ptr.h"
#include "base/single_thread_task_runner.h"
#include "base/threading/thread_checker.h"
#include "remoting/protocol/frame_consumer.h"
#include "third_party/webrtc/modules/desktop_capture/desktop_region.h"
namespace webrtc {
class SharedDesktopFrame;
} // namespace webrtc
namespace remoting {
// This class continuously uses two BasicDesktopFrame as buffer for decoding
// updated regions until the resolution is changed.
// This class should be used and destroyed on the same thread. If |task_runner|
// is null |callback| will be run directly upon the stack of DrawFrame,
// otherwise a task will be posted to feed the callback on the thread of
// |task_runner|.
// Only areas bound by updated_region() on the buffer are considered valid to
// |callback|. Please use RequestFullDesktopFrame() if you want to get a full
// desktop frame.
class DualBufferFrameConsumer : public protocol::FrameConsumer {
public:
// RenderCallback(decoded_frame, done)
// |done| should be run after it is rendered. Can be called on any thread.
using RenderCallback =
base::Callback<void(std::unique_ptr<webrtc::DesktopFrame>,
const base::Closure&)>;
// |callback| receives each decoded frame. |task_runner| is the thread the
// callback is posted to; if null, the callback is run synchronously inside
// DrawFrame(). |format| is the value returned by GetPixelFormat().
DualBufferFrameConsumer(
const RenderCallback& callback,
scoped_refptr<base::SingleThreadTaskRunner> task_runner,
PixelFormat format);
~DualBufferFrameConsumer() override;
// Feeds the callback on the right thread with a BasicDesktopFrame that merges
// updates from buffer_[0] and buffer_[1]. Does nothing if no updates have
// been received yet.
void RequestFullDesktopFrame();
// FrameConsumer interface.
std::unique_ptr<webrtc::DesktopFrame> AllocateFrame(
const webrtc::DesktopSize& size) override;
void DrawFrame(std::unique_ptr<webrtc::DesktopFrame> frame,
const base::Closure& done) override;
PixelFormat GetPixelFormat() override;
// Returns a weak pointer cached at construction, so this is safe to call
// without synchronizing with the WeakPtrFactory.
base::WeakPtr<DualBufferFrameConsumer> GetWeakPtr();
private:
void RunRenderCallback(std::unique_ptr<webrtc::DesktopFrame> frame,
const base::Closure& done);
// The two decode buffers; AllocateFrame() alternates between them until the
// screen size changes, at which point both are reallocated.
std::unique_ptr<webrtc::SharedDesktopFrame> buffers_[2];
// Represents dirty regions that are currently in buffers_[1]. Will be used
// when calling RequestFullDesktopFrame() to construct the full desktop frame.
webrtc::DesktopRegion buffer_1_mask_;
// Index (0 or 1) of the buffer most recently handed out by AllocateFrame().
int current_buffer_ = 0;
RenderCallback callback_;
scoped_refptr<base::SingleThreadTaskRunner> task_runner_;
PixelFormat pixel_format_;
base::ThreadChecker thread_checker_;
// Cached in the constructor so GetWeakPtr() avoids the factory.
base::WeakPtr<DualBufferFrameConsumer> weak_ptr_;
base::WeakPtrFactory<DualBufferFrameConsumer> weak_factory_;
DISALLOW_COPY_AND_ASSIGN(DualBufferFrameConsumer);
};
} // namespace remoting
#endif // REMOTING_CLIENT_DUAL_BUFFER_FRAME_CONSUMER_H_
// Copyright 2016 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "remoting/client/dual_buffer_frame_consumer.h"
#include <memory>
#include "base/bind.h"
#include "base/bind_helpers.h"
#include "testing/gtest/include/gtest/gtest.h"
#include "third_party/webrtc/modules/desktop_capture/desktop_frame.h"
#include "third_party/webrtc/modules/desktop_capture/shared_desktop_frame.h"
namespace remoting {
namespace {
// Returns the underlying (unshared) frame backing a SharedDesktopFrame handed
// out by the consumer under test.
webrtc::DesktopFrame* GetUnderlyingFrame(
    const std::unique_ptr<webrtc::DesktopFrame>& frame) {
  // The consumer only hands out SharedDesktopFrame instances, so a
  // static_cast downcast is appropriate here; reinterpret_cast is not
  // guaranteed to produce a correctly adjusted pointer for a class hierarchy.
  return static_cast<webrtc::SharedDesktopFrame*>(frame.get())
      ->GetUnderlyingFrame();
}
// Fills |rect| inside |frame| with a solid RGBA color and marks that
// rectangle as the frame's updated region.
void FillRGBARect(uint8_t r,
                  uint8_t g,
                  uint8_t b,
                  uint8_t a,
                  const webrtc::DesktopRect& rect,
                  webrtc::DesktopFrame* frame) {
  const uint8_t rgba[4] = {r, g, b, a};
  for (int row = 0; row < rect.height(); row++) {
    for (int col = 0; col < rect.width(); col++) {
      uint8_t* pixel = frame->GetFrameDataAtPos(
          rect.top_left().add(webrtc::DesktopVector(col, row)));
      for (int channel = 0; channel < 4; channel++) {
        pixel[channel] = rgba[channel];
      }
    }
  }
  frame->mutable_updated_region()->SetRect(rect);
}
// Asserts (non-fatally) that the pixel at |pos| in |frame| has the given
// RGBA component values.
void CheckFrameColor(uint8_t r,
uint8_t g,
uint8_t b,
uint8_t a,
const webrtc::DesktopVector& pos,
const webrtc::DesktopFrame& frame) {
uint8_t* data = frame.GetFrameDataAtPos(pos);
EXPECT_EQ(r, data[0]);
EXPECT_EQ(g, data[1]);
EXPECT_EQ(b, data[2]);
EXPECT_EQ(a, data[3]);
}
} // namespace
// Test fixture that owns a DualBufferFrameConsumer wired to capture the last
// rendered frame into |received_frame_| for inspection.
class DualBufferFrameConsumerTest : public testing::Test {
public:
void SetUp() override;
protected:
std::unique_ptr<DualBufferFrameConsumer> consumer_;
// The most recent frame delivered through the render callback.
std::unique_ptr<webrtc::DesktopFrame> received_frame_;
// The |done| closure that accompanied |received_frame_|.
base::Closure done_closure_;
private:
void OnFrameReceived(std::unique_ptr<webrtc::DesktopFrame> frame,
const base::Closure& done);
};
void DualBufferFrameConsumerTest::SetUp() {
// Pass a null task runner so the render callback runs synchronously on the
// DrawFrame() call stack, making the tests single-threaded.
consumer_.reset(new DualBufferFrameConsumer(
base::Bind(&DualBufferFrameConsumerTest::OnFrameReceived,
base::Unretained(this)), nullptr,
protocol::FrameConsumer::FORMAT_RGBA));
}
// Render callback target: stashes the delivered frame and its done closure so
// individual tests can inspect them.
void DualBufferFrameConsumerTest::OnFrameReceived(
std::unique_ptr<webrtc::DesktopFrame> frame,
const base::Closure& done) {
received_frame_ = std::move(frame);
done_closure_ = done;
}
// Verifies that a frame allocated by the consumer is delivered back,
// unchanged, through the render callback.
TEST_F(DualBufferFrameConsumerTest, AllocateOneFrame) {
  const webrtc::DesktopSize requested_size(16, 16);
  std::unique_ptr<webrtc::DesktopFrame> allocated =
      consumer_->AllocateFrame(requested_size);
  ASSERT_TRUE(allocated->size().equals(requested_size));
  // Keep a raw pointer so we can verify the exact same object comes back.
  webrtc::DesktopFrame* allocated_ptr = allocated.get();
  consumer_->DrawFrame(std::move(allocated), base::Closure());
  EXPECT_EQ(allocated_ptr, received_frame_.get());
}
// Verifies that successive allocations at the same size alternate between the
// two underlying buffers: 1, 2, 1, 2, ...
TEST_F(DualBufferFrameConsumerTest, BufferRotation) {
webrtc::DesktopSize size16x16(16, 16);
std::unique_ptr<webrtc::DesktopFrame> frame =
consumer_->AllocateFrame(size16x16);
webrtc::DesktopFrame* underlying_frame_1 = GetUnderlyingFrame(frame);
consumer_->DrawFrame(std::move(frame), base::Closure());
frame = consumer_->AllocateFrame(size16x16);
webrtc::DesktopFrame* underlying_frame_2 = GetUnderlyingFrame(frame);
// Second allocation uses the other buffer.
EXPECT_NE(underlying_frame_1, underlying_frame_2);
consumer_->DrawFrame(std::move(frame), base::Closure());
frame = consumer_->AllocateFrame(size16x16);
webrtc::DesktopFrame* underlying_frame_3 = GetUnderlyingFrame(frame);
// Third allocation wraps back to the first buffer.
EXPECT_EQ(underlying_frame_1, underlying_frame_3);
consumer_->DrawFrame(std::move(frame), base::Closure());
frame = consumer_->AllocateFrame(size16x16);
webrtc::DesktopFrame* underlying_frame_4 = GetUnderlyingFrame(frame);
// Fourth allocation reuses the second buffer again.
EXPECT_EQ(underlying_frame_2, underlying_frame_4);
consumer_->DrawFrame(std::move(frame), base::Closure());
}
// Verifies that RequestFullDesktopFrame() merges partial updates from both
// buffers into one coherent full-desktop frame.
TEST_F(DualBufferFrameConsumerTest, DrawAndMergeFrames) {
webrtc::DesktopSize size2x2(2, 2);
// X means uninitialized color.
// Frame 1 (drawn into buffer 0):
// RR
// RR
std::unique_ptr<webrtc::DesktopFrame> frame =
consumer_->AllocateFrame(size2x2);
FillRGBARect(0xff, 0, 0, 0xff, webrtc::DesktopRect::MakeXYWH(0, 0, 2, 2),
frame.get());
consumer_->DrawFrame(std::move(frame), base::Closure());
// Frame 2 (drawn into buffer 1):
// GG
// XX
frame = consumer_->AllocateFrame(size2x2);
FillRGBARect(0, 0xff, 0, 0xff, webrtc::DesktopRect::MakeXYWH(0, 0, 2, 1),
frame.get());
consumer_->DrawFrame(std::move(frame), base::Closure());
// Merged Frame:
// GG
// RR
consumer_->RequestFullDesktopFrame();
ASSERT_TRUE(received_frame_->size().equals(size2x2));
CheckFrameColor(0, 0xff, 0, 0xff, webrtc::DesktopVector(0, 0),
*received_frame_);
CheckFrameColor(0xff, 0, 0, 0xff, webrtc::DesktopVector(0, 1),
*received_frame_);
CheckFrameColor(0, 0xff, 0, 0xff, webrtc::DesktopVector(1, 0),
*received_frame_);
CheckFrameColor(0xff, 0, 0, 0xff, webrtc::DesktopVector(1, 1),
*received_frame_);
// Frame 3 (drawn into buffer 0 again):
// BX
// BX
frame = consumer_->AllocateFrame(size2x2);
FillRGBARect(0, 0, 0xff, 0xff, webrtc::DesktopRect::MakeXYWH(0, 0, 1, 2),
frame.get());
consumer_->DrawFrame(std::move(frame), base::Closure());
// Merged Frame:
// BG
// BR
consumer_->RequestFullDesktopFrame();
ASSERT_TRUE(received_frame_->size().equals(size2x2));
CheckFrameColor(0, 0, 0xff, 0xff, webrtc::DesktopVector(0, 0),
*received_frame_);
CheckFrameColor(0, 0, 0xff, 0xff, webrtc::DesktopVector(0, 1),
*received_frame_);
CheckFrameColor(0, 0xff, 0, 0xff, webrtc::DesktopVector(1, 0),
*received_frame_);
CheckFrameColor(0xff, 0, 0, 0xff, webrtc::DesktopVector(1, 1),
*received_frame_);
}
// Verifies that changing the requested frame size causes both underlying
// buffers to be reallocated rather than reused.
TEST_F(DualBufferFrameConsumerTest, ChangeScreenSizeAndReallocateBuffers) {
webrtc::DesktopSize size16x16(16, 16);
std::unique_ptr<webrtc::DesktopFrame> frame =
consumer_->AllocateFrame(size16x16);
webrtc::DesktopFrame* underlying_frame_1 = GetUnderlyingFrame(frame);
consumer_->DrawFrame(std::move(frame), base::Closure());
frame = consumer_->AllocateFrame(size16x16);
webrtc::DesktopFrame* underlying_frame_2 = GetUnderlyingFrame(frame);
// Same size: the second allocation alternates to the other buffer.
EXPECT_NE(underlying_frame_1, underlying_frame_2);
consumer_->DrawFrame(std::move(frame), base::Closure());
webrtc::DesktopSize size32x32(32, 32);
frame = consumer_->AllocateFrame(size32x32);
webrtc::DesktopFrame* underlying_frame_3 = GetUnderlyingFrame(frame);
// New size: buffer 0 must be a freshly allocated frame.
EXPECT_NE(underlying_frame_1, underlying_frame_3);
consumer_->DrawFrame(std::move(frame), base::Closure());
frame = consumer_->AllocateFrame(size32x32);
webrtc::DesktopFrame* underlying_frame_4 = GetUnderlyingFrame(frame);
// New size: buffer 1 must also be a freshly allocated frame.
EXPECT_NE(underlying_frame_2, underlying_frame_4);
consumer_->DrawFrame(std::move(frame), base::Closure());
}
} // namespace remoting
......@@ -27,33 +27,31 @@ void GlDesktop::SetCanvas(GlCanvas* canvas) {
return;
}
layer_.reset(new GlRenderLayer(kTextureId, canvas));
if (last_frame_) {
layer_->SetTexture(last_frame_->data(), last_frame_->size().width(),
last_frame_->size().height());
}
last_desktop_size_.set(0, 0);
}
void GlDesktop::SetVideoFrame(std::unique_ptr<webrtc::DesktopFrame> frame) {
if (layer_) {
if (!last_frame_ || !frame->size().equals(last_frame_->size())) {
layer_->SetTexture(frame->data(), frame->size().width(),
frame->size().height());
} else {
for (webrtc::DesktopRegion::Iterator i(frame->updated_region());
!i.IsAtEnd(); i.Advance()) {
const uint8_t* rect_start =
frame->GetFrameDataAtPos(i.rect().top_left());
layer_->UpdateTexture(
rect_start, i.rect().left(), i.rect().top(), i.rect().width(),
i.rect().height(), frame->stride());
}
if (!layer_) {
return;
}
if (!frame->size().equals(last_desktop_size_)) {
layer_->SetTexture(frame->data(), frame->size().width(),
frame->size().height());
last_desktop_size_.set(frame->size().width(), frame->size().height());
} else {
for (webrtc::DesktopRegion::Iterator i(frame->updated_region());
!i.IsAtEnd(); i.Advance()) {
const uint8_t* rect_start =
frame->GetFrameDataAtPos(i.rect().top_left());
layer_->UpdateTexture(
rect_start, i.rect().left(), i.rect().top(), i.rect().width(),
i.rect().height(), frame->stride());
}
}
last_frame_ = std::move(frame);
}
void GlDesktop::Draw() {
if (layer_ && last_frame_) {
if (layer_ && !last_desktop_size_.is_empty()) {
layer_->Draw(1.f);
}
}
......
......@@ -8,10 +8,10 @@
#include <memory>
#include "base/macros.h"
#include "third_party/webrtc/modules/desktop_capture/desktop_geometry.h"
namespace webrtc {
class DesktopFrame;
class DesktopRegion;
} // namespace webrtc
namespace remoting {
......@@ -25,10 +25,11 @@ class GlDesktop {
GlDesktop();
virtual ~GlDesktop();
// |frame| can be either a full frame or updated regions only frame.
void SetVideoFrame(std::unique_ptr<webrtc::DesktopFrame> frame);
// Sets the canvas on which the desktop will be drawn. Resumes the current
// state of the desktop to the context of the new canvas.
// Sets the canvas on which the desktop will be drawn. Caller must feed a
// full desktop frame after calling this function.
// If |canvas| is nullptr, nothing will happen when calling Draw().
void SetCanvas(GlCanvas* canvas);
......@@ -37,7 +38,7 @@ class GlDesktop {
private:
std::unique_ptr<GlRenderLayer> layer_;
std::unique_ptr<webrtc::DesktopFrame> last_frame_;
webrtc::DesktopSize last_desktop_size_;
DISALLOW_COPY_AND_ASSIGN(GlDesktop);
};
......
......@@ -51,13 +51,10 @@ SoftwareVideoRenderer::SoftwareVideoRenderer(protocol::FrameConsumer* consumer)
}
SoftwareVideoRenderer::SoftwareVideoRenderer(
scoped_refptr<base::SingleThreadTaskRunner> decode_task_runner,
protocol::FrameConsumer* consumer,
protocol::FrameStatsConsumer* stats_consumer)
: decode_task_runner_(decode_task_runner),
consumer_(consumer),
stats_consumer_(stats_consumer),
weak_factory_(this) {}
std::unique_ptr<protocol::FrameConsumer> consumer)
: SoftwareVideoRenderer(consumer.get()) {
owned_consumer_ = std::move(consumer);
}
SoftwareVideoRenderer::~SoftwareVideoRenderer() {
if (decoder_)
......
......@@ -44,17 +44,10 @@ class SoftwareVideoRenderer : public protocol::VideoRenderer,
// be called on the same thread.
explicit SoftwareVideoRenderer(protocol::FrameConsumer* consumer);
// Deprecated constructor. TODO(yuweih): remove.
// Constructs the renderer and initializes it immediately. Caller should not
// call Initialize() after using this constructor.
// All methods must be called on the same thread the renderer is created. The
// |decode_task_runner_| is used to decode the video packets. |consumer| and
// |stats_consumer| must outlive the renderer. |stats_consumer| may be
// nullptr, performance tracking is disabled in that case.
SoftwareVideoRenderer(
scoped_refptr<base::SingleThreadTaskRunner> decode_task_runner,
protocol::FrameConsumer* consumer,
protocol::FrameStatsConsumer* stats_consumer);
// Same as above, but take ownership of the |consumer|.
explicit SoftwareVideoRenderer(
std::unique_ptr<protocol::FrameConsumer> consumer);
~SoftwareVideoRenderer() override;
// VideoRenderer interface.
......@@ -77,7 +70,12 @@ class SoftwareVideoRenderer : public protocol::VideoRenderer,
const base::Closure& done);
scoped_refptr<base::SingleThreadTaskRunner> decode_task_runner_;
// |owned_consumer_| and |consumer_| should refer to the same object if
// |owned_consumer_| is not null.
std::unique_ptr<protocol::FrameConsumer> owned_consumer_;
protocol::FrameConsumer* const consumer_;
protocol::FrameStatsConsumer* stats_consumer_ = nullptr;
std::unique_ptr<VideoDecoder> decoder_;
......
......@@ -15,6 +15,7 @@
#include "base/message_loop/message_loop.h"
#include "base/run_loop.h"
#include "base/threading/thread.h"
#include "remoting/client/client_context.h"
#include "remoting/codec/video_encoder_verbatim.h"
#include "remoting/proto/video.pb.h"
#include "remoting/protocol/frame_consumer.h"
......@@ -134,17 +135,17 @@ void SetTrue(int* out) {
class SoftwareVideoRendererTest : public ::testing::Test {
public:
SoftwareVideoRendererTest() : decode_thread_("TestDecodeThread") {
decode_thread_.Start();
renderer_.reset(new SoftwareVideoRenderer(decode_thread_.task_runner(),
&frame_consumer_, nullptr));
SoftwareVideoRendererTest() : context_(nullptr) {
context_.Start();
renderer_.reset(new SoftwareVideoRenderer(&frame_consumer_));
renderer_->Initialize(context_, nullptr);
renderer_->OnSessionConfig(
*protocol::SessionConfig::ForTestWithVerbatimVideo());
}
protected:
base::MessageLoop message_loop_;
base::Thread decode_thread_;
ClientContext context_;
TestFrameConsumer frame_consumer_;
std::unique_ptr<SoftwareVideoRenderer> renderer_;
......
......@@ -20,6 +20,8 @@ namespace protocol {
class FrameConsumer {
public:
virtual ~FrameConsumer() {}
// List of supported pixel formats needed by various platforms.
enum PixelFormat {
FORMAT_BGRA, // Used by the Pepper plugin.
......@@ -37,7 +39,6 @@ class FrameConsumer {
protected:
FrameConsumer() {}
virtual ~FrameConsumer() {}
private:
DISALLOW_COPY_AND_ASSIGN(FrameConsumer);
......
......@@ -297,6 +297,8 @@
'client/client_telemetry_logger.cc',
'client/client_telemetry_logger.h',
'client/client_user_interface.h',
'client/dual_buffer_frame_consumer.cc',
'client/dual_buffer_frame_consumer.h',
'client/empty_cursor_filter.cc',
'client/empty_cursor_filter.h',
'client/key_event_mapper.cc',
......
......@@ -246,6 +246,7 @@
'client/chromoting_client_runtime_unittest.cc',
'client/client_status_logger_unittest.cc',
'client/client_telemetry_logger_unittest.cc',
'client/dual_buffer_frame_consumer_unittest.cc',
'client/empty_cursor_filter_unittest.cc',
'client/key_event_mapper_unittest.cc',
'client/normalizing_input_filter_cros_unittest.cc',
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment